cengal.hardware.memory.shared_memory.versions.v_1.generated_optimized_shared_memory

Module docstring. Docstring conventions: http://www.python.org/dev/peps/pep-0257/

   1#!/usr/bin/env python
   2# coding=utf-8
   3
   4# Copyright © 2012-2024 ButenkoMS. All rights reserved. Contacts: <gtalk@butenkoms.space>
   5# 
   6# Licensed under the Apache License, Version 2.0 (the "License");
   7# you may not use this file except in compliance with the License.
   8# You may obtain a copy of the License at
   9# 
  10#     http://www.apache.org/licenses/LICENSE-2.0
  11# 
  12# Unless required by applicable law or agreed to in writing, software
  13# distributed under the License is distributed on an "AS IS" BASIS,
  14# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15# See the License for the specific language governing permissions and
  16# limitations under the License.
  17
  18
  19# __all__ = ['SharedMemory', 'QueueType', 'Offset', 'Size', 'SharedMemoryError',
  20#            'WrongObjectTypeError', 'NoMessagesInQueueError',
  21#            'nearest_size', 'nsize', 'TBase', 'IList', 'codec_by_type', 'get_in_line', 'wait_my_turn']
  22
  23
  24"""
  25Module Docstring
  26Docstrings: http://www.python.org/dev/peps/pep-0257/
  27"""
  28
  29__author__ = "ButenkoMS <gtalk@butenkoms.space>"
  30__copyright__ = "Copyright © 2012-2024 ButenkoMS. All rights reserved. Contacts: <gtalk@butenkoms.space>"
  31__credits__ = ["ButenkoMS <gtalk@butenkoms.space>", ]
  32__license__ = "Apache License, Version 2.0"
  33__version__ = "4.4.1"
  34__maintainer__ = "ButenkoMS <gtalk@butenkoms.space>"
  35__email__ = "gtalk@butenkoms.space"
  36# __status__ = "Prototype"
  37__status__ = "Development"
  38# __status__ = "Production"
  39
  40
  41from cengal.introspection.inspect import is_callable, is_descriptor, is_async
  42from cengal.math.numbers import RationalNumber
  43from cengal.hardware.memory.barriers import full_memory_barrier, mm_pause
  44from cengal.time_management.cpu_clock import cpu_clock
  45from cengal.time_management.high_precision_sync_sleep import hps_sleep
  46from cengal.time_management.sleep_tools import sleep
  47from cengal.introspection.inspect import pdi, pifrl, intro_func_repr_limited
  48from cengal.system import OS_TYPE
  49from cengal.file_system.file_manager import file_exists
  50from cengal.data_manipulation.conversion.binary import bint_to_bytes, bytes_to_bint
  51from cengal.introspection.inspect import is_setable_data_descriptor
  52# from .compilable import write_uint64 as write_uint64_c, read_uint64 as read_uint64_c, write_int64, read_int64, write_double, read_double, zero_memory
  53from .compilable import write_uint64, read_uint64, read_uint8, write_int64, read_int64, write_double, read_double, \
  54    zero_memory, list__get_item, list__get_item_as_offset, list__set_item, list__set_item_as_offset, mask_least_significant_bits
  55
  56import os
  57import asyncio
  58import pickle
  59import ctypes
  60import numpy as np
  61from datetime import datetime, timedelta, timezone, date, time
  62from decimal import Decimal
  63from enum import IntEnum
  64from multiprocessing.shared_memory import SharedMemory as MultiprocessingSharedMemory
  65from array import array
  66from inspect import isclass, ismodule, getattr_static
  67from contextlib import contextmanager
  68from pathlib import PurePath
  69from math import log2, ceil
  70from pickle import dumps as pickle_dumps, loads as pickle_loads
  71from inspect import isfunction, ismethod, isclass, ismethoddescriptor
  72from collections.abc import Sequence as AbsSequence, MutableSequence as AbsMutableSequence, Set as AbsSet, \
  73    MutableSet as AbsMutableSet, Mapping as AbsMapping, MutableMapping as AbsMutableMapping
try:
    from torch import Tensor, from_numpy
except ImportError:
    # torch is an optional dependency: provide minimal stand-ins so that
    # isinstance checks and annotations elsewhere in this module keep working
    # when torch is not installed.  Both stubs raise if actually used.
    class Tensor:
        def numpy(self) -> np.ndarray:
            raise NotImplementedError

    def from_numpy(numpy_ndarray: np.ndarray) -> Tensor:
        raise NotImplementedError
  83
  84from types import FrameType, CodeType
  85from typing import Any, Tuple, Optional, List, Dict, Set, FrozenSet, AbstractSet, Type, Union, Sequence, cast, Hashable, Coroutine
  86
  87
DEBUG = False  # module-wide debug switch; usage is not visible in this chunk


# Back-reference to the active SharedMemory instance, used by the
# (commented-out) debug read/write wrappers below.
# NOTE(review): presumably assigned by SharedMemory itself -- confirm.
current_shared_memory_instance: 'SharedMemory' = None
  92
  93
  94# def write_uint64(base_address: int, offset: int, value: int):
  95#     if current_shared_memory_instance is not None:
  96#         if 460 <= offset <= 564:
  97#             print('write_uint64: offset_to_be_monitored: offset: {}, value: {}'.format(offset, value))
  98        
  99#     write_uint64_cython(base_address, offset, value)
 100
 101
 102# def write_uint64(base_address: int, offset: int, value: int):
 103#     if current_shared_memory_instance is None:
 104#         return write_uint64_c(base_address, offset, value)
 105#     else:
 106#         return current_shared_memory_instance.write_uint64(offset, value)
 107
 108# def read_uint64(base_address: int, offset: int) -> int:
 109#     if current_shared_memory_instance is None:
 110#         return read_uint64_c(base_address, offset)
 111#     else:
 112#         return current_shared_memory_instance.read_uint64(offset)
 113
 114
class QueueType(IntEnum):
    """Ordering discipline for a shared-memory queue."""
    fifo = 0  # first in, first out
    lifo = 1  # last in, first out
 119
class ObjectType(IntEnum):
    """Type tag stored in the first header word of every allocated record.

    The ``t`` prefix distinguishes tag names from the Python types they
    describe.  Tags are written into shared memory by the T* codec classes
    below, so renumbering existing members would break compatibility with
    already-written data; new tags should only be appended.
    """
    tfree_memory = 0
    tmessage = 1
    tnone = 2
    tbool = 3
    tint = 4
    tfloat = 5
    tcomplex = 6
    tstr = 7
    tbytes = 8
    tbytearray = 9
    ttuple = 10
    tlist = 11
    tmutableset = 12
    tset = 13
    tmutablemapping = 14
    tmapping = 15
    tfastdict = 16
    tclass = 17
    tpickable = 18
    tinternal_list = 19
    tsmallint = 20
    tbigint = 21
    tgeneralobject = 22
    tnumpyndarray = 23
    ttorchtensor = 24
    tstaticobject = 25
    tfastset = 26
    tslice = 27
    tdecimal = 28
    tdatetime = 29
    tstaticobjectwithslots = 30
 153
class SysValuesOffsets(IntEnum):
    """Slot indices of the bookkeeping values kept at the start of the
    shared-memory block (presumably indexed in 8-byte words, like the other
    ``*Offsets`` enums in this module -- confirm against the allocator code).
    """
    total_mem_size = 0
    data_start_offset = 1
    data_size = 2
    data_end_offset = 3
    free_memory_search_start = 4
    first_message_offset = 5
    last_message_offset = 6
    creator_in_charge = 7
    consumer_in_charge = 8
    creator_wants_to_be_in_charge = 9
    consumer_wants_to_be_in_charge = 10
    creator_ready = 11
    consumer_ready = 12
 169
Offset = int  # type alias: a byte offset within the shared-memory block
Size = int  # type alias: a size in bytes
minimal_memory_block_size = 8  # allocation granularity: one 64-bit word
block_size = minimal_memory_block_size
bs = block_size  # short alias
 175
 176
class SharedMemoryError(Exception):
    """Base class for all errors raised by this shared-memory implementation."""
    pass
 180
class OperationTimedOutError(SharedMemoryError):
    """Raised when a shared-memory operation exceeds its time limit."""
    pass
 184
class FreeMemoryChunkNotFoundError(SharedMemoryError):
    """Raised when no contiguous free memory chunk of the requested size exists.

    When this error occurs, adjust the ``size`` parameter of the SharedMemory
    configuration.  Estimating memory consumption down to the byte is not
    practical, because it fails to account for the overhead required by each
    stored entity (type metadata, pointers to child entities, etc.).

    Choose ``size`` in broad units -- tens of megabytes (embedded systems),
    hundreds, or thousands -- rather than precise byte counts, just as you
    would make an educated guess for a web server's memory (e.g. "256 MB might
    be insufficient but 768 MB could be adequate") and adjust after testing.

    Also be aware of memory fragmentation, which affects all allocators,
    including the OS itself.  For example, a pool sized to hold exactly ten
    64-bit integers plus bookkeeping (about 200 bytes) may, after deleting
    every second integer, have 50 bytes free in total but only a 10-byte
    largest contiguous chunk -- so a 20-byte string needing contiguous space
    cannot be stored.

    The remedy is simply to increase the ``size`` parameter of SharedMemory,
    akin to enlarging hosted-server memory or thread stack sizes.
    """
    pass
 200
 201
class ObjBufferIsSmallerThanRequestedNumpyArrayError(SharedMemoryError):
    """Raised when an object's buffer is smaller than the numpy array requested over it."""
    pass
 204
 205
class WrongObjectTypeError(SharedMemoryError):
    """Raised when the type tag at an offset does not match the expected ObjectType."""
    pass
 208
 209
class NoMessagesInQueueError(SharedMemoryError):
    """Raised when a message is requested from an empty queue."""
    pass
 212
 213
 214def nearest_size(size: Size) -> Size:
 215    return ((size // 8) * 8 + 8) if size % 8 else size
 216
 217
 218nsize = nearest_size
 219
 220
class BaseIObject:
    """Marker base class for shared-memory-backed container objects (e.g. IListTrue)."""
    pass
 223
 224
 225# TODO: add next fields: obj_id (simple int index; need to identify object in shared memory); ref_count (simple int counter; need to count references to object. Howerver this field can be moved to shared memory dict with all objects properties like ref_count, etc.)
class BaseObjOffsets(IntEnum):
    """Slot indices (in 8-byte words) of the header every record starts with."""
    obj_type = 0  # ObjectType tag
    obj_size = 1  # record size


BaseObjOffsetsLen: int = 2  # number of header slots; 2
bsBaseObjOffsetsLen: int = 16  # header size in bytes; 8 * 2 = 16
 233
 234
 235class TBase:
 236    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: Any) -> Tuple[Any, Offset, Size]:
 237        raise NotImplementedError
 238    
 239    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> Any:
 240        raise NotImplementedError
 241    
 242    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
 243        raise NotImplementedError
 244    
 245    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
 246        raise NotImplementedError
 247    
 248    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
 249        raise NotImplementedError
 250
 251
 252# ======================================================================================================================
 253# === None =====================================================================================================
 254
 255
 256class TNone:
 257    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: None) -> Tuple[None, Offset, Size]:
 258        offset, real_size = shared_memory.malloc(ObjectType.tnone, 0)
 259        return obj, offset, real_size
 260    
 261    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
 262        if ObjectType.tnone != read_uint64(shared_memory.base_address, offset):
 263            raise WrongObjectTypeError
 264
 265        return None
 266    
 267    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
 268        if ObjectType.tnone != read_uint64(shared_memory.base_address, offset):
 269            raise WrongObjectTypeError
 270
 271        shared_memory.free(offset)
 272
 273
 274# ======================================================================================================================
 275# === Int =====================================================================================================
 276
 277
class IntOffsets(IntEnum):
    """Payload slot indices (in 8-byte words) of a ``tint`` record."""
    data = 0  # the int64 value itself
 280
 281
 282class TInt:
 283    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: int) -> Tuple[int, Offset, Size]:
 284        offset, real_size = shared_memory.malloc(ObjectType.tint, 8)
 285        write_int64(shared_memory.base_address, offset + 16 + 0, obj)
 286        return obj, offset, real_size
 287    
 288    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> int:
 289        if ObjectType.tint != read_uint64(shared_memory.base_address, offset + 0):
 290            raise WrongObjectTypeError
 291
 292        return read_int64(shared_memory.base_address, offset + 16 + 0)
 293    
 294    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
 295        if ObjectType.tint != read_uint64(shared_memory.base_address, offset + 0):
 296            raise WrongObjectTypeError
 297
 298        shared_memory.free(offset)
 299
 300
 301# ======================================================================================================================
 302# === SmallInt =====================================================================================================
 303
 304
class SmallInt(int):
    """Marker subclass of ``int`` requesting storage as a ``tsmallint`` record."""
    ...


smallint = SmallInt  # lowercase alias
sint = SmallInt  # short alias
 311
 312
class SmallIntOffsets(IntEnum):
    """Payload slot indices (in 8-byte words) of a ``tsmallint`` record."""
    data = 0  # the int64 value itself
 315
 316
 317class TSmallInt:
 318    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: int) -> Tuple[int, Offset, Size]:
 319        offset, real_size = shared_memory.malloc(ObjectType.tsmallint, 8)
 320        write_int64(shared_memory.base_address, offset + 16 + 0, obj)
 321        return obj, offset, real_size
 322    
 323    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> int:
 324        if ObjectType.tsmallint != read_uint64(shared_memory.base_address, offset + 0):
 325            raise WrongObjectTypeError
 326
 327        return read_int64(shared_memory.base_address, offset + 16 + 0)
 328    
 329    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
 330        if ObjectType.tsmallint != read_uint64(shared_memory.base_address, offset + 0):
 331            raise WrongObjectTypeError
 332
 333        shared_memory.free(offset)
 334
 335
 336# ======================================================================================================================
 337# === LargeInt =====================================================================================================
 338
 339
class BigInt(int):
    """Marker subclass of ``int`` requesting storage as a ``tbigint`` record."""
    ...


bigint = BigInt  # lowercase alias
bint = BigInt  # short alias
 346
 347
class BigIntOffsets(IntEnum):
    """Payload slot indices (in 8-byte words) of a ``tbigint`` record."""
    data_size = 0  # payload length in bytes
    data = 1  # start of the serialized digits
 351
 352
 353class TBigInt:
 354    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: int) -> Tuple[int, Offset, Size]:
 355        data = bint_to_bytes(obj)
 356        data_size = len(data)
 357        # offset, real_size = shared_memory.malloc(ObjectType.tbigint, 16 + 8 * data_size)
 358        offset, real_size = shared_memory.malloc(ObjectType.tbigint, 16 + data_size)
 359        write_uint64(shared_memory.base_address, offset + 16 + 0, data_size)
 360        data_offset = offset + 16 + 8
 361        shared_memory._shared_memory.buf[data_offset:data_offset + data_size] = data
 362        return obj, offset, real_size
 363    
 364    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> int:
 365        if ObjectType.tbigint != read_uint64(shared_memory.base_address, offset + 0):
 366            raise WrongObjectTypeError
 367
 368        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
 369        if data_size:
 370            data_offset = offset + 16 + 8
 371            data = bytes(shared_memory._shared_memory.buf[data_offset:data_offset + data_size])
 372            return bytes_to_bint(data)
 373        else:
 374            return 0
 375    
 376    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
 377        if ObjectType.tbigint != read_uint64(shared_memory.base_address, offset + 0):
 378            raise WrongObjectTypeError
 379
 380        shared_memory.free(offset)
 381    
 382    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
 383        if ObjectType.tbigint != read_uint64(shared_memory.base_address, offset + 0):
 384            raise WrongObjectTypeError
 385
 386        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
 387        data_offset = offset + 16 + 8
 388        return shared_memory._shared_memory.buf[data_offset:data_offset + data_size]
 389    
 390    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
 391        if ObjectType.tbigint != read_uint64(shared_memory.base_address, offset + 0):
 392            raise WrongObjectTypeError
 393
 394        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
 395        data_offset = offset + 16 + 8
 396        return data_offset, data_size
 397
 398
 399# ======================================================================================================================
 400# === Bool =====================================================================================================
 401
 402
class BoolOffsets(IntEnum):
    """Payload slot indices (in 8-byte words) of a ``tbool`` record."""
    data = 0  # 0 or 1 stored as uint64
 405
 406
 407class TBool:
 408    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: bool) -> Tuple[bool, Offset, Size]:
 409        offset, real_size = shared_memory.malloc(ObjectType.tbool, 8)
 410        write_uint64(shared_memory.base_address, offset + 16 + 0, int(obj))
 411        return obj, offset, real_size
 412    
 413    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> bool:
 414        if ObjectType.tbool != read_uint64(shared_memory.base_address, offset + 0):
 415            raise WrongObjectTypeError
 416
 417        return bool(read_uint64(shared_memory.base_address, offset + 16 + 0))
 418    
 419    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
 420        if ObjectType.tbool != read_uint64(shared_memory.base_address, offset + 0):
 421            raise WrongObjectTypeError
 422
 423        shared_memory.free(offset)
 424
 425
 426# ======================================================================================================================
 427# === Float =====================================================================================================
 428
 429
class FloatOffsets(IntEnum):
    """Payload slot indices (in 8-byte words) of a ``tfloat`` record."""
    data = 0  # the IEEE-754 double value
 432
 433
 434class TFloat:
 435    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: float) -> Tuple[float, Offset, Size]:
 436        offset, real_size = shared_memory.malloc(ObjectType.tfloat, 8)
 437        write_double(shared_memory.base_address, offset + 16 + 0, obj)
 438        return obj, offset, real_size
 439    
 440    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> float:
 441        if ObjectType.tfloat != read_uint64(shared_memory.base_address, offset):
 442            raise WrongObjectTypeError
 443
 444        return read_double(shared_memory.base_address, offset + 16 + 0)
 445    
 446    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
 447        if ObjectType.tfloat != read_uint64(shared_memory.base_address, offset):
 448            raise WrongObjectTypeError
 449
 450        shared_memory.free(offset)
 451
 452
 453# ======================================================================================================================
 454# === Bytes =====================================================================================================
 455
 456
class BytesOffsets(IntEnum):
    """Payload slot indices (in 8-byte words) of a ``tbytes`` record."""
    data_size = 0  # payload length in bytes
    data = 1  # start of the raw bytes
 460
 461
 462class TBytes:
 463    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: bytes) -> Tuple[bytes, Offset, Size]:
 464        data_size = len(obj)
 465        # offset, real_size = shared_memory.malloc(ObjectType.tbytes, 16 + 8 * data_size)
 466        offset, real_size = shared_memory.malloc(ObjectType.tbytes, 16 + data_size)
 467        write_uint64(shared_memory.base_address, offset + 16 + 0, data_size)
 468        data_offset = offset + 16 + 8
 469        shared_memory._shared_memory.buf[data_offset:data_offset + data_size] = obj
 470        return obj, offset, real_size
 471    
 472    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> bytes:
 473        if ObjectType.tbytes != read_uint64(shared_memory.base_address, offset + 0):
 474            raise WrongObjectTypeError
 475
 476        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
 477        if data_size:
 478            data_offset = offset + 16 + 8
 479            obj = bytes(shared_memory._shared_memory.buf[data_offset:data_offset + data_size])
 480            return obj
 481        else:
 482            return bytes()
 483    
 484    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
 485        if ObjectType.tbytes != read_uint64(shared_memory.base_address, offset + 0):
 486            raise WrongObjectTypeError
 487
 488        shared_memory.free(offset)
 489    
 490    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
 491        if ObjectType.tbytes != read_uint64(shared_memory.base_address, offset + 0):
 492            raise WrongObjectTypeError
 493
 494        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
 495        data_offset = offset + 16 + 8
 496        return shared_memory._shared_memory.buf[data_offset:data_offset + data_size]
 497    
 498    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
 499        if ObjectType.tbytes != read_uint64(shared_memory.base_address, offset + 0):
 500            raise WrongObjectTypeError
 501
 502        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
 503        data_offset = offset + 16 + 8
 504        return data_offset, data_size
 505
 506
 507# class TBytes:
 508#     def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: bytes) -> Tuple[bytes, Offset, Size]:
 509#         data_size = len(obj)
 510#         if 0 == data_size:
 511#             allocated_data_size = 1
 512#         else:
 513#             allocated_data_size = data_size
 514        
 515#         # offset, real_size = shared_memory.malloc(ObjectType.tbytes, 8 * (2 - 1) + 8 * allocated_data_size)
 516#         offset, real_size = shared_memory.malloc(ObjectType.tbytes, 8 * (2 - 1) + allocated_data_size)
 517#         shared_memory.print_mem(offset, 100, f'TBytes.map_to_shared_memory 0: offset: {offset}, real_size: {real_size}')
 518#         write_uint64(shared_memory.base_address, offset + 16 + 0, data_size)
 519#         shared_memory.print_mem(offset, 100, f'TBytes.map_to_shared_memory 1: offset: {offset}, real_size: {real_size}')
 520#         data_offset = offset + 16 + 8
 521#         if data_size:
 522#             try:
 523#                 shared_memory._shared_memory.buf[data_offset:data_offset + data_size] = obj
 524#             except ValueError:
 525#                 print(len(shared_memory._shared_memory.buf[data_offset:data_offset + data_size]), shared_memory._shared_memory.buf[data_offset:data_offset + data_size])
 526#                 print(len(obj), obj)
 527#                 raise
 528            
 529#             shared_memory.print_mem(offset, 100, f'TBytes.map_to_shared_memory 2: offset: {offset}, real_size: {real_size}, data_size: {data_size}, data_offset: {data_offset}')
 530        
 531#         return obj, offset, real_size
 532    
 533#     def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> bytes:
 534#         if ObjectType.tbytes != read_uint64(shared_memory.base_address, offset + 0):
 535#             raise WrongObjectTypeError
 536
 537#         data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
 538#         data_offset = offset + 16 + 8
 539#         shared_memory.print_mem(offset, 100, f'TBytes.init_from_shared_memory 0: offset: {offset}, data_size: {data_size}, data_offset: {data_offset}')
 540#         if data_size:
 541#             obj = bytes(shared_memory._shared_memory.buf[data_offset:data_offset + data_size])
 542#         else:
 543#             obj = b''
 544        
 545#         return obj
 546    
 547#     def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
 548#         shared_memory.free(offset)
 549
 550
 551# ======================================================================================================================
 552# === Bytearray =====================================================================================================
 553
 554
class BytearrayOffsets(IntEnum):
    """Payload slot indices (in 8-byte words) of a ``tbytearray`` record."""
    data_size = 0  # payload length in bytes
    data = 1  # start of the raw bytes
 558
 559
 560class TBytearray:
 561    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: bytearray) -> Tuple[bytearray, Offset, Size]:
 562        data = bytes(obj)
 563        data_size = len(data)
 564        # offset, real_size = shared_memory.malloc(ObjectType.tbytearray, 16 + 8 * data_size)
 565        offset, real_size = shared_memory.malloc(ObjectType.tbytearray, 16 + data_size)
 566        write_uint64(shared_memory.base_address, offset + 16 + 0, data_size)
 567        data_offset = offset + 16 + 8
 568        shared_memory._shared_memory.buf[data_offset:data_offset + data_size] = data
 569        return obj, offset, real_size
 570    
 571    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> bytearray:
 572        if ObjectType.tbytearray != read_uint64(shared_memory.base_address, offset + 0):
 573            raise WrongObjectTypeError
 574
 575        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
 576        if data_size:
 577            data_offset = offset + 16 + 8
 578            data = bytes(shared_memory._shared_memory.buf[data_offset:data_offset + data_size])
 579            return bytearray(data)
 580        else:
 581            return bytearray(bytes())
 582    
 583    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
 584        if ObjectType.tbytearray != read_uint64(shared_memory.base_address, offset + 0):
 585            raise WrongObjectTypeError
 586
 587        shared_memory.free(offset)
 588    
 589    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
 590        if ObjectType.tbytearray != read_uint64(shared_memory.base_address, offset + 0):
 591            raise WrongObjectTypeError
 592
 593        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
 594        data_offset = offset + 16 + 8
 595        return shared_memory._shared_memory.buf[data_offset:data_offset + data_size]
 596    
 597    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
 598        if ObjectType.tbytearray != read_uint64(shared_memory.base_address, offset + 0):
 599            raise WrongObjectTypeError
 600
 601        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
 602        data_offset = offset + 16 + 8
 603        return data_offset, data_size
 604
 605
 606# ======================================================================================================================
 607# === Str =====================================================================================================
 608
 609
class StrOffsets(IntEnum):
    """Payload slot indices (in 8-byte words) of a ``tstr`` record."""
    data_size = 0  # encoded length in bytes
    data = 1  # start of the encoded text
 613
 614
 615class TStr:
 616    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: str) -> Tuple[str, Offset, Size]:
 617        data = str.encode(obj)
 618        data_size = len(data)
 619        # offset, real_size = shared_memory.malloc(ObjectType.tstr, 16 + 8 * data_size)
 620        offset, real_size = shared_memory.malloc(ObjectType.tstr, 16 + data_size)
 621        write_uint64(shared_memory.base_address, offset + 16 + 0, data_size)
 622        data_offset = offset + 16 + 8
 623        shared_memory._shared_memory.buf[data_offset:data_offset + data_size] = data
 624        return obj, offset, real_size
 625    
 626    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> str:
 627        if ObjectType.tstr != read_uint64(shared_memory.base_address, offset + 0):
 628            raise WrongObjectTypeError
 629
 630        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
 631        if data_size:
 632            data_offset = offset + 16 + 8
 633            data = bytes(shared_memory._shared_memory.buf[data_offset:data_offset + data_size])
 634            return data.decode()
 635        else:
 636            return str()
 637    
 638    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
 639        if ObjectType.tstr != read_uint64(shared_memory.base_address, offset + 0):
 640            raise WrongObjectTypeError
 641
 642        shared_memory.free(offset)
 643    
 644    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
 645        if ObjectType.tstr != read_uint64(shared_memory.base_address, offset + 0):
 646            raise WrongObjectTypeError
 647
 648        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
 649        data_offset = offset + 16 + 8
 650        return shared_memory._shared_memory.buf[data_offset:data_offset + data_size]
 651    
 652    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
 653        if ObjectType.tstr != read_uint64(shared_memory.base_address, offset + 0):
 654            raise WrongObjectTypeError
 655
 656        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
 657        data_offset = offset + 16 + 8
 658        return data_offset, data_size
 659
 660
 661# ======================================================================================================================
 662# === ListTrue =========================================================================================================
 663# An old preoptimized version with a bunch of issues and bugs due to the wrong offsets. Deprecated. Use IList instead
 664
 665
class InternalListTrueOffsets(IntEnum):
    """Header slot indices (in 8-byte words) of the deprecated ``tinternal_list`` layout."""
    capacity = 0  # allocated item slots
    size = 1  # used item slots
 669
 670
 671def malloc_tinternal_list_true(shared_memory: 'SharedMemory', size: Size, capacity: Size = None) -> Tuple[Offset, Size]:
 672    capacity = (size << 1 if size else 16) if capacity is None else capacity
 673    datas_sys_part_size = 8 * len(InternalListTrueOffsets)
 674    offset, real_size = shared_memory.malloc(ObjectType.tinternal_list, datas_sys_part_size + 8 * capacity)
 675    data_offset = offset + datas_sys_part_size
 676    write_uint64(shared_memory.base_address, data_offset + 8 * InternalListTrueOffsets.capacity, capacity)
 677    write_uint64(shared_memory.base_address, data_offset + 8 * InternalListTrueOffsets.size, size)
 678    return offset, real_size
 679
 680
def realloc_tinternal_list_true(shared_memory: 'SharedMemory', offset: Offset, desired_size: int = None, new_capacity: int = None, loop_allowed: bool = True, zero_mem: bool = True) -> Tuple[Offset, Size]:
    """Deprecated companion of malloc_tinternal_list_true: grow/resize its buffer.

    Deprecated together with IListTrue (see the comment above); kept for reference only.

    :param desired_size: minimal item count needed; new capacity becomes desired_size * 2 (or 16 when 0)
    :param new_capacity: explicit new capacity; wins over desired_size
    :return: (new offset, new real allocated size)
    """
    datas_sys_part_size = 8 * len(InternalListTrueOffsets)
    data_offset = offset + datas_sys_part_size
    capacity = read_uint64(shared_memory.base_address, data_offset + 8 * InternalListTrueOffsets.capacity)
    size = read_uint64(shared_memory.base_address, data_offset + 8 * InternalListTrueOffsets.size)
    # NOTE(review): this assignment is dead code — it is unconditionally
    # overwritten by the if/else chain right below.
    new_list_capacity = capacity << 1 if new_capacity is None else new_capacity
    if new_capacity is None:
        if desired_size is None:
            new_list_capacity = capacity << 1 if capacity else 16
        else:
            new_list_capacity = desired_size << 1 if desired_size else 16
    else:
        new_list_capacity = new_capacity
    
    # Never shrink below the number of items currently stored.
    if new_list_capacity < size:
        new_list_capacity = size
    
    new_offset, new_real_size = shared_memory.realloc(offset, datas_sys_part_size + 8 * new_list_capacity, loop_allowed, zero_mem)
    data_offset = new_offset + datas_sys_part_size
    write_uint64(shared_memory.base_address, data_offset + 8 * InternalListTrueOffsets.capacity, new_list_capacity)
    return new_offset, new_real_size
 702
 703
class IListTrue(BaseIObject, list):
    """Deprecated shared-memory backed list (the old "true"-offsets variant).

    Superseded by ``IList`` below; per the module comment above, this class is
    kept only for reference and suffers from wrong-offset bugs. Issues visible
    in this chunk (deliberately left unfixed because the class is deprecated):

    * ``__init__`` allocates the buffer with ``malloc_tinternal_list`` (the NEW
      16-bytes-per-item layout) while every accessor below addresses it with
      8-byte item slots (``... + key * 8``) — the two layouts do not match.
    * ``append``/``insert`` grow only when ``_list_len > _list_capacity``, so a
      write to slot ``_list_len`` can happen when len == capacity.
    * ``__delitem__`` with an int key frees every item AFTER the removed index
      while shifting, instead of freeing the removed item itself.
    * ``remove``/``__contains__``/``index``/``count`` compare ``item._offset``
      only, so they work only with shared-memory-mapped operands.
    """

    def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: List = None) -> None:
        # offset is None: allocate a fresh tlist object and copy ``obj`` into it.
        # offset given: attach to an already existing shared-memory object.
        self._shared_memory = shared_memory
        self._base_address = shared_memory.base_address
        if offset is None:
            offset, real_size = shared_memory.malloc(ObjectType.tlist, 8)
            self._offset = offset
            self._offset__data = offset + 8 * 2
            self._offset__pointer_to_internal_list = self._offset__data
            
            if obj is None:
                obj = list()
            
            data_len = len(obj)
            capacity_len = data_len << 1 if data_len else 16
            # NOTE(review): uses the NEW-layout allocator, not malloc_tinternal_list_true.
            internal_list_offset, data_tuple_real_size = malloc_tinternal_list(shared_memory, data_len, capacity_len)
            self._pointer_to_internal_list = internal_list_offset
            for i, item in enumerate(obj):
                item_mapped_obj, item_offset, item_size = shared_memory.put_obj(item)
                write_uint64(self._base_address, self._item_offset(i), item_offset)
        else:
            self._offset = offset
            self._offset__data = offset + 8 * 2
            self._offset__pointer_to_internal_list = self._offset__data
    
    def raw_to_bytes(self, bytes_num: int) -> bytes:
        # Debug helper: dump the first ``bytes_num`` raw bytes of the internal buffer.
        start_index = self._pointer_to_internal_list
        return self._shared_memory.read_mem(start_index, bytes_num)
        # return bytes(self._shared_memory._shared_memory.buf[start_index : start_index + bytes_num])
    
    @property
    def _obj_size(self):
        # Allocated size as recorded in the object's header.
        return read_uint64(self._base_address, self._offset + 8 * BaseObjOffsets.obj_size)
    
    @property
    def _pointer_to_internal_list(self):
        # Offset of the growable internal buffer that holds the item slots.
        return read_uint64(self._base_address, self._offset__pointer_to_internal_list)

    @_pointer_to_internal_list.setter
    def _pointer_to_internal_list(self, value: Offset):
        write_uint64(self._base_address, self._offset__pointer_to_internal_list, value)

    @property
    def _list_len(self):
        # Number of items currently stored (the "size" sys field of the buffer).
        return read_uint64(self._base_address, self._pointer_to_internal_list + 8 * 2 + 8 * InternalListTrueOffsets.size)
    
    @_list_len.setter
    def _list_len(self, value: int):
        write_uint64(self._base_address, self._pointer_to_internal_list + 8 * 2 + 8 * InternalListTrueOffsets.size, value)

    @property
    def _list_capacity(self):
        # Number of item slots allocated (the "capacity" sys field of the buffer).
        return read_uint64(self._base_address, self._pointer_to_internal_list + 8 * 2 + 8 * InternalListTrueOffsets.capacity)
    
    def _item_offset(self, key: int) -> Offset:
        # Item slots are 8 bytes wide and follow the header and the sys fields.
        return self._pointer_to_internal_list + 8 * 2 + 8 * len(InternalListTrueOffsets) + key * 8
    
    def __len__(self) -> int:
        return self._list_len
    
    def get_children_offsets(self) -> List[Offset]:
        # Raw offsets of all stored items (for traversal/inspection).
        return [read_uint64(self._base_address, self._item_offset(i)) for i in range(self._list_len)]
    
    def __getitem__(self, key: Union[int, slice]) -> Union[Any, List]:
        # Supports int indices and step-less slices; negative indices are normalized.
        if isinstance(key, int):
            if key < 0:
                key += len(self)
            if key < 0 or key >= len(self):
                raise IndexError

            item_offset = read_uint64(self._base_address, self._item_offset(key))
            return self._shared_memory.get_obj(item_offset)
        elif isinstance(key, slice):
            if key.step is not None:
                raise NotImplementedError
            
            if key.start is None:
                start = 0
            elif key.start < 0:
                start = key.start + len(self)
            else:
                start = key.start
            
            if key.stop is None:
                stop = len(self)
            elif key.stop < 0:
                stop = key.stop + len(self)
            else:
                stop = key.stop
            
            # NOTE(review): unlike built-in list, an empty or out-of-range slice raises.
            if start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop:
                raise IndexError
            
            result_list = list()
            for i in range(start, stop):
                item_offset = read_uint64(self._base_address, self._item_offset(i))
                result_list.append(self._shared_memory.get_obj(item_offset))
            return result_list
        else:
            raise TypeError
    
    def __setitem__(self, key: Union[int, slice], value: Union[Any, Sequence]) -> Any:
        # NOTE(review): the previously referenced object is not freed before overwrite.
        if isinstance(key, int):
            if key < 0:
                key += len(self)
            if key < 0 or key >= len(self):
                raise IndexError

            item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(value)
            write_uint64(self._base_address, self._item_offset(key), item_offset)
        elif isinstance(key, slice):
            if key.step is not None:
                raise NotImplementedError
            
            if key.start is None:
                start = 0
            elif key.start < 0:
                start = key.start + len(self)
            else:
                start = key.start
            
            if key.stop is None:
                stop = len(self)
            elif key.stop < 0:
                stop = key.stop + len(self)
            else:
                stop = key.stop
            
            if start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop:
                raise IndexError
            
            for i in range(start, stop):
                item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(value[i - start])
                write_uint64(self._base_address, self._item_offset(i), item_offset)
        else:
            raise TypeError

    def __delitem__(self, key: Union[int, slice]) -> None:
        if isinstance(key, int):
            if key < 0:
                key += len(self)
            if key < 0 or key >= len(self):
                raise IndexError

            # NOTE(review): this loop frees every item AFTER ``key`` while
            # shifting (and then stores the freed offsets), instead of freeing
            # the removed item at ``key`` — known bug, class is deprecated.
            for i in range(key + 1, len(self)):
                item_offset = read_uint64(self._base_address, self._item_offset(i))
                self._shared_memory.free(item_offset)
                write_uint64(self._base_address, self._item_offset(i - 1), item_offset)
            
            self._list_len -= 1
        elif isinstance(key, slice):
            if key.step is not None:
                raise NotImplementedError
            
            if key.start is None:
                start = 0
            elif key.start < 0:
                start = key.start + len(self)
            else:
                start = key.start
            
            if key.stop is None:
                stop = len(self)
            elif key.stop < 0:
                stop = key.stop + len(self)
            else:
                stop = key.stop
            
            if start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop:
                raise IndexError
            
            for i in range(start, stop):
                item_offset = read_uint64(self._base_address, self._item_offset(i))
                self._shared_memory.free(item_offset)
            
            del_items_num = stop - start
            
            for i in range(stop, len(self)):
                item_offset = read_uint64(self._base_address, self._item_offset(i))
                write_uint64(self._base_address, self._item_offset(i - del_items_num), item_offset)
            
            self._list_len -= del_items_num
        else:
            raise TypeError
    
    def append(self, item: Any) -> None:
        # NOTE(review): '>' lets len == capacity slip through before the write
        # below — looks like an off-by-one; deprecated code, left as-is.
        if self._list_len > self._list_capacity:
            self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list)

        item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(item)
        write_uint64(self._base_address, self._item_offset(self._list_len), item_offset)
        self._list_len += 1

    def extend(self, items: Sequence) -> None:
        items_num = len(items)
        if self._list_len + items_num > self._list_capacity:
            self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list, self._list_len + items_num)

        for i, item in enumerate(items):
            item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(item)
            write_uint64(self._base_address, self._item_offset(self._list_len + i), item_offset)
        
        self._list_len += items_num
    
    def insert(self, index: int, item: Any) -> None:
        if index < 0:
            index += len(self)
        if index < 0 or index > len(self):
            raise IndexError

        # NOTE(review): same '>' vs '>=' grow-check concern as in append().
        if self._list_len > self._list_capacity:
            self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list)

        # Shift items right to open a slot at ``index``.
        for i in range(self._list_len, index, -1):
            item_offset = read_uint64(self._base_address, self._item_offset(i - 1))
            write_uint64(self._base_address, self._item_offset(i), item_offset)
        
        item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(item)
        write_uint64(self._base_address, self._item_offset(index), item_offset)
        self._list_len += 1
    
    def pop(self, index: int = -1) -> Any:
        if index < 0:
            index += len(self)
        if index < 0 or index >= len(self):
            raise IndexError

        item_offset = read_uint64(self._base_address, self._item_offset(index))
        result = self._shared_memory.get_obj(item_offset)
        
        # Shift the tail left over the popped slot; the object itself is NOT freed
        # (it is returned to the caller as a mapped object).
        for i in range(index + 1, len(self)):
            item_offset = read_uint64(self._base_address, self._item_offset(i))
            write_uint64(self._base_address, self._item_offset(i - 1), item_offset)
        
        self._list_len -= 1
        return result
    
    def remove(self, item: Any) -> None:
        # Matches by shared-memory offset only — ``item`` must be a mapped object.
        for i in range(len(self)):
            item_offset = read_uint64(self._base_address, self._item_offset(i))
            if item_offset == item._offset:
                for j in range(i + 1, len(self)):
                    item_offset = read_uint64(self._base_address, self._item_offset(j))
                    write_uint64(self._base_address, self._item_offset(j - 1), item_offset)
                
                self._list_len -= 1
                return
        
        raise ValueError
    
    def clear(self) -> None:
        # Frees every referenced object, then resets the length (capacity kept).
        for i in range(len(self)):
            item_offset = read_uint64(self._base_address, self._item_offset(i))
            self._shared_memory.free(item_offset)
        
        self._list_len = 0
    
    def __iter__(self):
        return IListIterator(self)
    
    def __reversed__(self):
        return IListReversedIterator(self)
    
    def __contains__(self, item: Any) -> bool:
        # Offset-identity check, not value equality.
        for i in range(len(self)):
            item_offset = read_uint64(self._base_address, self._item_offset(i))
            if item_offset == item._offset:
                return True
        
        return False
    
    def index(self, item: Any, start: int = 0, stop: int = None) -> int:
        # Offset-identity search, not value equality.
        if stop is None:
            stop = len(self)
        
        for i in range(start, stop):
            item_offset = read_uint64(self._base_address, self._item_offset(i))
            if item_offset == item._offset:
                return i
        
        raise ValueError
    
    def count(self, item: Any) -> int:
        # Counts slots referencing the same shared-memory offset as ``item``.
        result = 0
        for i in range(len(self)):
            item_offset = read_uint64(self._base_address, self._item_offset(i))
            if item_offset == item._offset:
                result += 1
        
        return result
    
    def reverse(self) -> None:
        # In-place reversal by swapping the raw offset words end-to-end.
        for i in range(len(self) // 2):
            item_offset = read_uint64(self._base_address, self._item_offset(i))
            write_uint64(self._base_address, self._item_offset(i), read_uint64(self._base_address, self._item_offset(len(self) - i - 1)))
            write_uint64(self._base_address, self._item_offset(len(self) - i - 1), item_offset)
    
    def sort(self, key: Any = None, reverse: bool = False) -> None:
        raise NotImplementedError
    
    def copy(self) -> 'IList':
        # Note: returns the NEW IList implementation, not an IListTrue.
        result = IList(self._shared_memory)
        result.extend(self)
        return result
    
    def __add__(self, other: Sequence) -> 'IList':
        result = IList(self._shared_memory)
        result.extend(self)
        result.extend(other)
        return result
    
    def __iadd__(self, other: Sequence) -> 'IList':
        self.extend(other)
        return self
    
    def __mul__(self, other: int) -> 'IList':
        result = IList(self._shared_memory)
        for i in range(other):
            result.extend(self)
        
        return result
    
    def __imul__(self, other: int) -> 'IList':
        my_copy: IList = self.copy()
        for i in range(other):
            self.extend(my_copy)
        
        return self
    
    def __rmul__(self, other: int) -> 'IList':
        return self.__mul__(other)
    
    def __eq__(self, other: Sequence) -> bool:
        if len(self) != len(other):
            return False
        
        for i in range(len(self)):
            if self[i] != other[i]:
                return False
        
        return True
    
    def __ne__(self, other: Sequence) -> bool:
        return not self.__eq__(other)
    
    def __lt__(self, other: Sequence) -> bool:
        # NOTE(review): element-wise comparison, not the lexicographic ordering
        # of built-in list; raises IndexError when ``other`` is shorter.
        for i in range(len(self)):
            if self[i] >= other[i]:
                return False
        
        return True
    
    def __le__(self, other: Sequence) -> bool:
        for i in range(len(self)):
            if self[i] > other[i]:
                return False
        
        return True
    
    def __gt__(self, other: Sequence) -> bool:
        for i in range(len(self)):
            if self[i] <= other[i]:
                return False
        
        return True
    
    def __ge__(self, other: Sequence) -> bool:
        for i in range(len(self)):
            if self[i] < other[i]:
                return False
        
        return True
    
    def __repr__(self) -> str:
        return f'IList({list(self)})'
    
    def __str__(self) -> str:
        return f'IList({list(self)})'
    
    def __hash__(self) -> int:
        return hash(tuple(self))
    
    def __sizeof__(self) -> int:
        # NOTE(review): read_uint64 is called with three arguments here; the third
        # looks like it should be added to the offset — confirm read_uint64's signature.
        return read_uint64(self._base_address, self._offset + 8 * BaseObjOffsets.obj_size) + read_uint64(self._base_address, self._pointer_to_internal_list, 8 * BaseObjOffsets.obj_size)
    
    def export(self) -> list:
        # Materialize into a plain (non-shared) Python list.
        return list(self)

    # def __del__(self) -> None:
    #     self._shared_memory.free(self._pointer_to_internal_list)
    #     self._shared_memory.free(self._offset)
1095
1096
1097# ======================================================================================================================
1098# === InternalList =====================================================================================================
1099
1100
class InternalListOffsets(IntEnum):
    # Field indices (in uint64 units) of the internal-list sys block that
    # follows the 16-byte object header.
    capacity = 0  # total item slots allocated
    size = 1  # item slots currently in use
1104
1105
class InternalListFieldOffsets(IntEnum):
    # Per-item slot layout (in uint64 units): a type tag followed by the payload.
    field_type = 0  # an InternalListFieldTypes tag
    offset_or_data = 1  # object offset (tobj) or the inline value (tint/tfloat/tbool)
1109
1110
class InternalListFieldTypes(IntEnum):
    # Type tags stored in a slot's field_type word; tint/tfloat/tbool payloads
    # are stored inline, tobj payloads hold a shared-memory offset.
    tnone = 0
    tobj = 1
    tint = 2
    tfloat = 3
    tbool = 4
1117
1118
def malloc_tinternal_list(shared_memory: 'SharedMemory', size: Size, capacity: Size = None) -> Tuple[Offset, Size]:
    """Allocate a zeroed internal-list buffer inside *shared_memory*.

    Layout: [16-byte object header][capacity: uint64][size: uint64][capacity slots x 16 bytes].

    :param size: number of items initially stored
    :param capacity: number of item slots; defaults to size * 2 (or 16 when size == 0)
    :return: (offset of the new object, its real allocated size)
    :raises ValueError: if an explicit capacity is smaller than size
    """
    if capacity is None:
        capacity = size << 1 if size else 16
    elif size > capacity:
        raise ValueError

    buffer_bytes = 16 + 16 + capacity * 16
    offset, real_size = shared_memory.malloc(ObjectType.tinternal_list, buffer_bytes, zero_mem=True)
    base = shared_memory.base_address
    sys_block = offset + 16
    write_uint64(base, sys_block, capacity)
    write_uint64(base, sys_block + 8, size)
    return offset, real_size
1129
1130
def realloc_tinternal_list(shared_memory: 'SharedMemory', offset: Offset, desired_size: int = None, new_capacity: int = None, loop_allowed: bool = True, zero_mem: bool = True) -> Tuple[Offset, Size]:
    """Grow (or resize) an internal-list buffer created by malloc_tinternal_list.

    Fix: removed a dead assignment to ``new_list_capacity`` that was
    unconditionally overwritten by the if/else chain below it.

    :param shared_memory: owning SharedMemory instance
    :param offset: offset of the existing internal-list object
    :param desired_size: minimal item count needed; when given (and new_capacity
        is not), the new capacity becomes desired_size * 2 (or 16 when 0)
    :param new_capacity: explicit new capacity; wins over desired_size
    :param loop_allowed: forwarded to SharedMemory.realloc
    :param zero_mem: forwarded to SharedMemory.realloc
    :return: (new offset, new real size); unchanged when capacity stays the same
    :raises ValueError: if desired_size > new_capacity
    """
    if (desired_size is not None) and (new_capacity is not None) and (desired_size > new_capacity):
        raise ValueError
    
    # Layout: [16-byte object header][capacity: uint64][size: uint64][items: 16 bytes each]
    sys_data_offset = offset + 16
    capacity = read_uint64(shared_memory.base_address, sys_data_offset + 0)
    size = read_uint64(shared_memory.base_address, sys_data_offset + 8)
    if new_capacity is None:
        if desired_size is None:
            new_list_capacity = capacity << 1 if capacity else 16
        else:
            new_list_capacity = desired_size << 1 if desired_size else 16
    else:
        new_list_capacity = new_capacity
    
    # Never shrink below the number of items currently stored.
    if new_list_capacity < size:
        new_list_capacity = size
    
    if new_list_capacity == capacity:
        # Nothing to do: report the currently allocated size from the object header.
        real_size = read_uint64(shared_memory.base_address, offset + 8)
        return offset, real_size

    # NOTE(review): malloc_tinternal_list requests 16 + 16 + capacity * 16 bytes;
    # the missing extra 16 here may be accounted for inside SharedMemory.realloc — confirm.
    new_offset, new_real_size = shared_memory.realloc(
            offset,
            16 + new_list_capacity * 16,
            loop_allowed,
            zero_mem
        )
    new_sys_data_offset = new_offset + 16
    write_uint64(shared_memory.base_address, new_sys_data_offset + 0, new_list_capacity)
    return new_offset, new_real_size
1163
1164
def destroy_tinternal_list(shared_memory: 'SharedMemory', offset: Offset) -> None:
    """Release the internal-list buffer located at *offset* back to *shared_memory*."""
    shared_memory.free(offset)
1167
1168
def uint64_to_bytes(int_data: int) -> bytes:
    """
    Pack a 64 bit unsigned int into little-endian bytes.

    Bug fix: the format strings of uint64_to_bytes and uint8_to_bytes were
    swapped — this function packed '<B' (a single byte) and raised
    struct.error for any value > 255, even though it is used on 64-bit
    offsets (see IList.raw_to_list). '<Q' is the 8-byte unsigned format.

    :param int_data: unsigned int, 0 <= int_data < 2**64
    :return: bytes(); len == 8
    """
    from struct import pack
    result = pack('<Q', int_data)
    return result
1178
1179
def uint8_to_bytes(int_data: int) -> bytes:
    """
    Pack an 8 bit unsigned int into a single byte.

    Bug fix: the format strings of uint8_to_bytes and uint64_to_bytes were
    swapped — this function packed '<Q' (8 bytes) despite its name. '<B' is
    the 1-byte unsigned format.

    :param int_data: unsigned int, 0 <= int_data <= 255
    :return: bytes(); len == 1
    """
    from struct import pack
    result = pack('<B', int_data)
    return result
1189
1190
1191# ======================================================================================================================
1192# === List =====================================================================================================
1193
1194
class ListOffsets(IntEnum):
    # Field indices (in uint64 units) of the IList object's data area: a single
    # field holding the offset of the growable internal buffer.
    internal_list_offset = 0
1197
1198
1199class IList(BaseIObject, list):
1200    __slots__ = ('_shared_memory', '_base_address', '_offset', '_offset__data', '_offset__pointer_to_internal_list')
1201
1202    def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: List = None) -> None:
1203        self._shared_memory = shared_memory
1204        self._base_address = shared_memory.base_address
1205        if offset is None:
1206            offset, real_size = shared_memory.malloc(ObjectType.tlist, 8)
1207            try:
1208                self._offset = offset
1209                self._offset__data = offset + 16
1210                self._offset__pointer_to_internal_list = self._offset__data + 0
1211                
1212                if obj is None:
1213                    obj = list()
1214                
1215                data_len = len(obj)
1216                internal_list_offset, data_tuple_real_size = malloc_tinternal_list(shared_memory, data_len)
1217                self._pointer_to_internal_list = internal_list_offset
1218                for i, item in enumerate(obj):
1219                    # print(self.get_children_offsets())
1220                    # # print(self.raw_to_list(slice(0, None)))
1221                    # print(self.raw_to_bytes(200))
1222                    self._write_item(i, item)
1223                    # print(self.get_children_offsets())
1224                    # # print(self.raw_to_list(slice(0, None)))
1225                    # print(self.raw_to_bytes(200))
1226                
1227                # print(self.get_children_offsets())
1228                # # print(self.raw_to_list(slice(0, None)))
1229                # print(self.raw_to_bytes(200))
1230                # print('=======================')
1231            except:
1232                self._free_mem()
1233                raise
1234        else:
1235            self._offset = offset
1236            self._offset__data = offset + 16
1237            self._offset__pointer_to_internal_list = self._offset__data + 0
1238    
1239    def raw_to_list(self, key) -> List[bytes]:
1240        if isinstance(key, int):
1241            if key < 0:
1242                key += len(self)
1243            if key < 0 or key >= len(self):
1244                raise IndexError
1245
1246            item_offset = self._read_item_offset_or_data(key)
1247            return [uint64_to_bytes(item_offset)]
1248        elif isinstance(key, slice):
1249            if key.step is not None:
1250                raise NotImplementedError
1251            
1252            if key.start is None:
1253                start = 0
1254            elif key.start < 0:
1255                start = key.start + len(self)
1256            else:
1257                start = key.start
1258            
1259            if key.stop is None:
1260                stop = len(self)
1261            elif key.stop < 0:
1262                stop = key.stop + len(self)
1263            else:
1264                stop = key.stop
1265            
1266            if start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop:
1267                raise IndexError
1268            
1269            result_list = list()
1270            for i in range(start, stop):
1271                item_offset = self._read_item_offset_or_data(i)
1272                result_list.append(uint64_to_bytes(item_offset))
1273            
1274            return result_list
1275    
1276    def raw_to_bytes(self, bytes_num: int) -> bytes:
1277        start_index = self._pointer_to_internal_list
1278        return self._shared_memory.read_mem(start_index, bytes_num)
1279        # return bytes(self._shared_memory._shared_memory.buf[start_index : start_index + bytes_num])
1280
1281    @property
1282    def _obj_size(self):
1283        return read_uint64(self._base_address, self._offset + 8)
1284    
    @property
    def _pointer_to_internal_list(self):
        # Offset of the growable internal buffer that holds the item slots.
        return read_uint64(self._base_address, self._offset__pointer_to_internal_list)

    @_pointer_to_internal_list.setter
    def _pointer_to_internal_list(self, value: Offset):
        # Updated whenever realloc_tinternal_list moves the buffer.
        write_uint64(self._base_address, self._offset__pointer_to_internal_list, value)
1292
1293    @property
1294    def _list_len(self):
1295        return read_uint64(self._base_address, self._pointer_to_internal_list + 16 + 8)
1296    
1297    @_list_len.setter
1298    def _list_len(self, value: int):
1299        write_uint64(self._base_address, self._pointer_to_internal_list + 16 + 8, value)
1300
1301    @property
1302    def _list_capacity(self):
1303        return read_uint64(self._base_address, self._pointer_to_internal_list + 16 + 0)
1304    
1305    def _item_offset(self, key: int) -> Offset:
1306        return self._pointer_to_internal_list + 16 + 16 + key * 16
1307    
1308    def _item_type_offset(self, key: int) -> Offset:
1309        # from os import getpid
1310        result = self._pointer_to_internal_list + 16 + 16 + key * 16 + 0
1311        # add_0 = 16
1312        # add_1 = 16
1313        # add_2 = key * 16
1314        # add_3 = 0
1315        # print(f'PID: {getpid()}. [{add_0},{add_1},{add_2},{add_3}],{add_0 + add_1 + add_2 + add_3},{self._pointer_to_internal_list}: item_type_offset: {key}:{result}')
1316        return result
1317
1318    def _item_value_offset(self, key: int) -> Offset:
1319        # from os import getpid
1320        result = self._pointer_to_internal_list + 16 + 16 + key * 16 + 8
1321        # print(f'PID: {getpid()}. {16 + 16 + key * 16 + 8},{self._pointer_to_internal_list}: item_value_offset: {key}:{result}')
1322        return result
1323
1324    def _read_item_type(self, key: int) -> int:
1325        return read_uint64(self._base_address, self._item_type_offset(key))
1326    
1327    def _write_item_type(self, key: int, item_type: int) -> None:
1328        write_uint64(self._base_address, self._item_type_offset(key), item_type)
1329    
1330    def _read_item_offset_or_data(self, key: int) -> Union[Offset, int]:
1331        return read_uint64(self._base_address, self._item_value_offset(key))
1332
1333    def _write_item_offset_or_data(self, key: int, offset_or_data: Union[Offset, int]) -> None:
1334        write_uint64(self._base_address, self._item_value_offset(key), offset_or_data)
1335    
1336    # def _determine_obj_type(self, obj: Any) -> int:
1337    #     if isinstance(obj, int):
1338    #         return 1
1339    #     elif isinstance(obj, float):
1340    #         return 2
1341    #     elif isinstance(obj, bool):
1342    #         return 3
1343    #     else:
1344    #         return 0
1345    
1346    def _determine_obj_type(self, obj: Any) -> int:
1347        if type(obj) is int:
1348            return 2
1349        elif type(obj) is float:
1350            return 3
1351        elif type(obj) is bool:
1352            return 4
1353        elif obj is None:
1354            return 0
1355        else:
1356            return 1
1357    
1358    def _determine_obj_offset(self, obj: Any) -> Optional[Offset]:
1359        if isinstance(obj, BaseIObject):
1360            return obj._offset
1361        else:
1362            return None
1363    
    def _compare_item_to_obj_fast(self, key: int, obj: Any, obj_type: int, obj_offset) -> bool:
        """Compare the stored item at *key* with *obj*, given *obj*'s precomputed
        type tag and (optional) shared-memory offset.

        Only same-tag pairs can match. For tobj items, an offset-identity check
        is used when *obj* is mapped; otherwise the item is decoded and compared
        by value.
        """
        result: bool = False
        item_type = self._read_item_type(key)
        if item_type == obj_type:
            if item_type == 1:
                if obj_offset is None:
                    # Unmapped operand: decode the stored object and compare by value.
                    if self._read_item_value(key, item_type) == obj:
                        result = True
                else:
                    # Mapped operand: identity of the shared-memory offsets.
                    if self._read_item_offset_or_data(key) == obj_offset:
                        result = True
            elif item_type == 2:
                # NOTE(review): these inline branches compare the raw uint64
                # payload word to the Python value, not the decoded value as
                # _read_item_value does (read_int64/read_double/bool) — this
                # looks wrong at least for floats and negative ints; confirm.
                if self._read_item_offset_or_data(key) == obj:
                    result = True
            elif item_type == 3:
                if self._read_item_offset_or_data(key) == obj:
                    result = True
            elif item_type == 4:
                if self._read_item_offset_or_data(key) == obj:
                    result = True
            elif item_type == 0:
                result = obj is None
            else:
                raise ValueError

        return result
1390    
1391    def _compare_item_to_obj(self, key: int, obj: Any) -> bool:
1392        obj_type = self._determine_obj_type(obj)
1393        obj_offset = self._determine_obj_offset(obj)
1394        return self._compare_item_to_obj_fast(key, obj, obj_type, obj_offset)
1395
1396    def _read_item_value(self, key: int, item_type: int) -> Any:
1397        if item_type == 1:
1398            item_offset = read_uint64(self._base_address, self._item_value_offset(key))
1399            return self._shared_memory.get_obj(item_offset)
1400        elif item_type == 2:
1401            return read_int64(self._base_address, self._item_value_offset(key))
1402        elif item_type == 3:
1403            return read_double(self._base_address, self._item_value_offset(key))
1404        elif item_type == 4:
1405            return bool(read_uint64(self._base_address, self._item_value_offset(key)))
1406        elif item_type == 0:
1407            return None
1408        else:
1409            raise ValueError
1410    
1411    def _write_item_value(self, key: int, item_type: int, value: Any) -> None:
1412        if item_type == 1:
1413            item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(value)
1414            write_uint64(self._base_address, self._item_value_offset(key), item_offset)
1415        elif item_type == 2:
1416            write_int64(self._base_address, self._item_value_offset(key), value)
1417        elif item_type == 3:
1418            write_double(self._base_address, self._item_value_offset(key), value)
1419        elif item_type == 4:
1420            write_uint64(self._base_address, self._item_value_offset(key), int(value))
1421        elif item_type == 0:
1422            pass
1423        else:
1424            raise ValueError
1425    
1426    def _free_item_value(self, key: int, item_type: int) -> None:
1427        if item_type == 1:
1428            item_offset = read_uint64(self._base_address, self._item_value_offset(key))
1429            # self._shared_memory.free(item_offset)
1430            self._shared_memory.destroy_obj(item_offset)
1431        elif item_type == 2:
1432            pass
1433        elif item_type == 3:
1434            pass
1435        elif item_type == 4:
1436            pass
1437        elif item_type == 0:
1438            pass
1439        else:
1440            raise ValueError
1441
1442        self._write_item_type(key, 0)
1443    
1444    def _read_item_type_and_value(self, key: int) -> Tuple[int, Any]:
1445        item_type = self._read_item_type(key)
1446        return item_type, self._read_item_value(key, item_type)
1447    
1448    def _write_item_value_and_get_type(self, key: int, value: Any) -> int:
1449        if isinstance(value, int):
1450            write_uint64(self._base_address, self._item_value_offset(key), value)
1451            return 2
1452        elif isinstance(value, float):
1453            write_double(self._base_address, self._item_value_offset(key), value)
1454            return 3
1455        elif isinstance(value, bool):
1456            write_uint64(self._base_address, self._item_value_offset(key), int(value))
1457            return 4
1458        elif value is None:
1459            return 0
1460        else:
1461            item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(value)
1462            write_uint64(self._base_address, self._item_value_offset(key), item_offset)
1463            return 1
1464    
1465    def _free_item_value_and_get_type(self, key: int) -> int:
1466        item_type = self._read_item_type(key)
1467        self._free_item_value(key, item_type)
1468        return item_type
1469    
1470    def _read_item(self, key: int) -> Any:
1471        item_type = self._read_item_type(key)
1472        return self._read_item_value(key, item_type)
1473    
1474    def _write_item(self, key: int, value: Any) -> None:
1475        item_type = self._write_item_value_and_get_type(key, value)
1476        self._write_item_type(key, item_type)
1477    
1478    def _free_item(self, key: int) -> None:
1479        item_type = self._read_item_type(key)
1480        self._free_item_value(key, item_type)
1481    
1482    def _copy_item(self, src_key: int, dst_key: int) -> None:
1483        self._write_item_type(dst_key, self._read_item_type(src_key))
1484        self._write_item_offset_or_data(dst_key, self._read_item_offset_or_data(src_key))
1485    
    def copy_item(self, src_key: int, dst_key: int) -> None:
        """Public wrapper over :meth:`_copy_item`: shallow-copy slot *src_key* into *dst_key*."""
        return self._copy_item(src_key, dst_key)
1488    
1489    def _move_item(self, src_key: int, dst_key: int) -> None:
1490        self._write_item_type(dst_key, self._read_item_type(src_key))
1491        self._write_item_type(src_key, 0)
1492        self._write_item_offset_or_data(dst_key, self._read_item_offset_or_data(src_key))
1493    
    def move_item(self, src_key: int, dst_key: int) -> None:
        """Public wrapper over :meth:`_move_item`: move slot *src_key* into *dst_key*."""
        return self._move_item(src_key, dst_key)
1496    
1497    def copy_item_to_list(self, src_key: int, other: 'IList', dst_key: int) -> None:
1498        other._write_item_type(dst_key, self._read_item_type(src_key))
1499        other._write_item_offset_or_data(dst_key, self._read_item_offset_or_data(src_key))
1500    
1501    def move_item_to_list(self, src_key: int, other: 'IList', dst_key: int) -> None:
1502        other._write_item_type(dst_key, self._read_item_type(src_key))
1503        self._write_item_type(src_key, 0)
1504        other._write_item_offset_or_data(dst_key, self._read_item_offset_or_data(src_key))
1505    
1506    def _swap_items(self, key1: int, key2: int) -> None:
1507        item_type1 = self._read_item_type(key1)
1508        item_offset_or_data1 = self._read_item_offset_or_data(key1)
1509        self._write_item_type(key1, self._read_item_type(key2))
1510        self._write_item_type(key2, item_type1)
1511        self._write_item_offset_or_data(key1, self._read_item_offset_or_data(key2))
1512        self._write_item_offset_or_data(key2, item_offset_or_data1)
1513    
    def swap_items(self, key1: int, key2: int) -> None:
        """Public wrapper over :meth:`_swap_items`: exchange the contents of two slots."""
        return self._swap_items(key1, key2)
1516
    def __len__(self) -> int:
        # Number of occupied slots, backed by the internal-list length field.
        return self._list_len
1519    
1520    def get_children_data_or_offsets(self) -> List[Offset]:
1521        return [self._read_item_offset_or_data(i) for i in range(self._list_len)]
1522    
    def get_children_offsets(self):
        """Alias for :meth:`get_children_data_or_offsets`."""
        return self.get_children_data_or_offsets()
1525
1526    def _getitem_as_offset(self, key: int) -> Tuple[int, Offset]:
1527            return list__get_item_as_offset(key, self._base_address, self._offset__pointer_to_internal_list)
1528
1529    def __getitem__(self, key: Union[int, slice]) -> Union[Any, List]:
1530        if isinstance(key, int):
1531            base_address = self._base_address
1532            offset__pointer_to_internal_list = self._offset__pointer_to_internal_list
1533            pointer_to_internal_list = read_uint64(base_address, offset__pointer_to_internal_list)
1534            self_len = read_uint64(base_address, pointer_to_internal_list + 24)
1535            if key < 0 or key >= self_len:
1536                raise IndexError
1537
1538            return list__get_item(key, self._base_address, self._offset__pointer_to_internal_list, self._shared_memory.get_obj)
1539
1540            # base_address = self._base_address
1541            # offset__pointer_to_internal_list = self._offset__pointer_to_internal_list
1542            # pointer_to_internal_list = read_uint64(base_address, offset__pointer_to_internal_list)
1543            # self_len = read_uint64(base_address, pointer_to_internal_list + 24)
1544
1545            # if key < 0:
1546            #     key += self_len
1547            
1548            # if key < 0 or key >= self_len:
1549            #     raise IndexError
1550
1551            # item_type_offset = pointer_to_internal_list + 32 + key * 16
1552            # item_value_offset = pointer_to_internal_list + 40 + key * 16
1553            # item_type = read_uint64(base_address, item_type_offset)
1554            # if item_type == 1:
1555            #     return read_int64(base_address, item_value_offset)
1556            # elif item_type == 2:
1557            #     return read_double(base_address, item_value_offset)
1558            # elif item_type == 3:
1559            #     return bool(read_uint64(base_address, item_value_offset))
1560            # elif item_type == 0:
1561            #     item_offset = read_uint64(base_address, item_value_offset)
1562            #     return self._shared_memory.get_obj(item_offset)
1563            # else:
1564            #     raise ValueError
1565
1566            # # return self._read_item(key)
1567        elif isinstance(key, slice):
1568            if key.step is not None:
1569                raise NotImplementedError
1570            
1571            if key.start is None:
1572                start = 0
1573            elif key.start < 0:
1574                start = key.start + len(self)
1575            else:
1576                start = key.start
1577            
1578            if key.stop is None:
1579                stop = len(self)
1580            elif key.stop < 0:
1581                stop = key.stop + len(self)
1582            else:
1583                stop = key.stop
1584            
1585            if start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop:
1586                raise IndexError
1587            
1588            result_list = list()
1589            # performance improvement instead of using self._read_item(i)
1590            base_address = self._base_address
1591            offset__pointer_to_internal_list = self._offset__pointer_to_internal_list
1592            pointer_to_internal_list = read_uint64(base_address, offset__pointer_to_internal_list)
1593
1594            # item_type_offset = pointer_to_internal_list + 32 + i * 16
1595            item_type_offset = pointer_to_internal_list + 16 + 16 + i * 16 + 0
1596
1597            # item_value_offset = pointer_to_internal_list + 40 + i * 16
1598            item_value_offset = pointer_to_internal_list + 16 + 16 + i * 16 + 8
1599
1600            for i in range(start, stop):
1601                # result_list.append(self._read_item(i))
1602
1603                # performance improvement instead of using self._read_item(i)
1604                item_type = read_uint64(base_address, item_type_offset)
1605                if item_type == 2:
1606                    result_list.append(read_int64(base_address, item_value_offset))
1607                elif item_type == 3:
1608                    result_list.append(read_double(base_address, item_value_offset))
1609                elif item_type == 4:
1610                    result_list.append(bool(read_uint64(base_address, item_value_offset)))
1611                elif item_type == 0:
1612                    result_list.append(None)
1613                elif item_type == 1:
1614                    item_offset = read_uint64(base_address, item_value_offset)
1615                    result_list.append(self._shared_memory.get_obj(item_offset))
1616                else:
1617                    raise ValueError
1618            
1619            return result_list
1620        else:
1621            raise TypeError
1622
1623    def _setitem_as_offset(self, key: int, value_type_and_offset: Tuple[int, Offset], need_to_free_item: bool = True) -> Any:
1624        value_item_type, value_item_offset = value_type_and_offset
1625        list__set_item_as_offset(key, value_item_type, value_item_offset, self._base_address, self._offset__pointer_to_internal_list, need_to_free_item, self._shared_memory.destroy_obj)
1626    
1627    def __setitem__(self, key: Union[int, slice], value: Union[Any, Sequence], need_to_free_item: bool = True) -> Any:
1628        if isinstance(key, int):
1629            # print(f'{key=}, {value=}, {need_to_free_item=}')
1630            # internal_list_data_offset = self._pointer_to_internal_list + 16 + 16 + key * 16 + 0
1631            # internal_list_data_size = self._list_len * 16
1632            # self._shared_memory.print_mem(internal_list_data_offset, internal_list_data_size, 'internal_list before list__set_item')
1633            
1634            base_address = self._base_address
1635            offset__pointer_to_internal_list = self._offset__pointer_to_internal_list
1636            pointer_to_internal_list = read_uint64(base_address, offset__pointer_to_internal_list)
1637            self_len = read_uint64(base_address, pointer_to_internal_list + 24)
1638            if key < 0 or key >= self_len:
1639                raise IndexError
1640
1641            list__set_item(key, value, self._base_address, self._offset__pointer_to_internal_list, need_to_free_item, self._shared_memory.destroy_obj, self._shared_memory.put_obj)
1642
1643            # base_address = self._base_address
1644            # offset__pointer_to_internal_list = self._offset__pointer_to_internal_list
1645            # pointer_to_internal_list = read_uint64(base_address, offset__pointer_to_internal_list)
1646            # self_len = read_uint64(base_address, pointer_to_internal_list + 24)
1647
1648            # if key < 0:
1649            #     key += self_len
1650            
1651            # if key < 0 or key >= self_len:
1652            #     raise IndexError
1653            
1654            # item_type_offset = pointer_to_internal_list + 32 + key * 16
1655            # item_value_offset = pointer_to_internal_list + 40 + key * 16
1656            # if isinstance(value, int):
1657            #     write_int64(base_address, item_value_offset, value)
1658            #     item_type = 1
1659            # elif isinstance(value, float):
1660            #     write_double(base_address, item_value_offset, value)
1661            #     item_type = 2
1662            # elif isinstance(value, bool):
1663            #     write_uint64(base_address, item_value_offset, int(value))
1664            #     item_type = 3
1665            # else:
1666            #     item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(value)
1667            #     write_uint64(base_address, item_value_offset, item_offset)
1668            #     item_type = 0
1669            
1670            # write_uint64(base_address, item_type_offset, item_type)
1671
1672            # # self._write_item(key, value)
1673        elif isinstance(key, slice):
1674            if key.step is not None:
1675                raise NotImplementedError
1676            
1677            if key.start is None:
1678                start = 0
1679            elif key.start < 0:
1680                start = key.start + len(self)
1681            else:
1682                start = key.start
1683            
1684            if key.stop is None:
1685                stop = len(self)
1686            elif key.stop < 0:
1687                stop = key.stop + len(self)
1688            else:
1689                stop = key.stop
1690            
1691            if start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop:
1692                raise IndexError
1693            
1694            if need_to_free_item:
1695                for i in range(start, stop):
1696                    self._free_item(i)
1697            
1698            # performance improvement instead of using self._write_item(i, item)
1699            base_address = self._base_address
1700            offset__pointer_to_internal_list = self._offset__pointer_to_internal_list
1701            pointer_to_internal_list = read_uint64(base_address, offset__pointer_to_internal_list)
1702
1703            # item_type_offset = pointer_to_internal_list + 32 + i * 16
1704            item_type_offset = pointer_to_internal_list + 16 + 16 + i * 16 + 0
1705
1706            # item_value_offset = pointer_to_internal_list + 40 + i * 16
1707            item_value_offset = pointer_to_internal_list + 16 + 16 + i * 16 + 8
1708
1709            for i in range(start, stop):
1710                item = value[i - start]
1711                # self._write_item(i, item)
1712
1713                # performance improvement instead of using self._write_item(i, item)
1714                if isinstance(item, int):
1715                    write_int64(base_address, item_value_offset, item)
1716                    item_type = 2
1717                elif isinstance(item, float):
1718                    write_double(base_address, item_value_offset, item)
1719                    item_type = 3
1720                elif isinstance(item, bool):
1721                    write_uint64(base_address, item_value_offset, int(item))
1722                    item_type = 4
1723                elif item is None:
1724                    item_type = 0
1725                else:
1726                    item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(item)
1727                    write_uint64(base_address, item_value_offset, item_offset)
1728                    item_type = 1
1729                
1730                write_uint64(base_address, item_type_offset, item_type)
1731        else:
1732            raise TypeError
1733
1734    def __delitem__(self, key: Union[int, slice], need_to_free_item: bool = True) -> None:
1735        if isinstance(key, int):
1736            if key < 0:
1737                key += len(self)
1738            if key < 0 or key >= len(self):
1739                raise IndexError
1740
1741            if need_to_free_item:
1742                self._free_item(key)
1743
1744            for i in range(key + 1, len(self)):
1745                self._move_item(i, i - 1)
1746            
1747            self._list_len -= 1
1748        elif isinstance(key, slice):
1749            if key.step is not None:
1750                raise NotImplementedError
1751            
1752            if key.start is None:
1753                start = 0
1754            elif key.start < 0:
1755                start = key.start + len(self)
1756            else:
1757                start = key.start
1758            
1759            if key.stop is None:
1760                stop = len(self)
1761            elif key.stop < 0:
1762                stop = key.stop + len(self)
1763            else:
1764                stop = key.stop
1765            
1766            if start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop:
1767                raise IndexError
1768            
1769            if need_to_free_item:
1770                for i in range(start, stop):
1771                    self._free_item(i)
1772            
1773            del_items_num = stop - start
1774            
1775            for i in range(stop, len(self)):
1776                self._move_item(i, i - del_items_num)
1777            
1778            self._list_len -= del_items_num
1779        else:
1780            raise TypeError
1781    
1782    def append(self, item: Any) -> None:
1783        if self._list_len > self._list_capacity:
1784            self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list)
1785
1786        self._list_len += 1
1787        self.__setitem__(self._list_len - 1, item, need_to_free_item=False)
1788    
1789    def append_as_offset(self, value_type_and_offset: Tuple[int, Offset]) -> None:
1790        if self._list_len > self._list_capacity:
1791            self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list)
1792
1793        self._list_len += 1
1794        self._setitem_as_offset(self._list_len - 1, value_type_and_offset, need_to_free_item=False)
1795    
    def getitem_as_offset(self, key: int) -> Tuple[int, Offset]:
        """Public wrapper over :meth:`_getitem_as_offset`: raw (type tag, cell) of slot *key*."""
        return self._getitem_as_offset(key)
1798    
    def setitem_as_offset(self, key: int, value_type_and_offset: Tuple[int, Offset], need_to_free_item=True) -> None:
        """Public wrapper over :meth:`_setitem_as_offset`: store a raw (type tag, cell) pair."""
        self._setitem_as_offset(key, value_type_and_offset, need_to_free_item)
1801
1802    def extend(self, items: Sequence) -> None:
1803        items_num = len(items)
1804        if (self._list_len + items_num) > self._list_capacity:
1805            self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list, self._list_len + items_num)
1806
1807        original_list_len = self._list_len
1808        self._list_len += items_num
1809        for i, item in enumerate(items):
1810            self.__setitem__(original_list_len + i, item, need_to_free_item=False)
1811    
1812    def extend_with(self, items_num: int, value = None) -> None:
1813        if (self._list_len + items_num) > self._list_capacity:
1814            self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list, self._list_len + items_num)
1815
1816        original_list_len = self._list_len
1817        self._list_len += items_num
1818        for i in range(items_num):
1819            self.__setitem__(original_list_len + i, value, need_to_free_item=False)
1820
    def set_capacity(self, capacity: int) -> Union[int, None]:
        """Grow the backing storage to hold at least *capacity* items.

        Returns the resulting storage size, or None when *capacity* does not
        exceed the current capacity (no reallocation; capacity never shrinks).
        """
        if capacity <= self._list_capacity:
            return
        
        self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list, capacity)
        return result_size
1827    
1828    def insert(self, index: int, item: Any) -> None:
1829        if index < 0:
1830            index += len(self)
1831        if index < 0 or index > len(self):
1832            raise IndexError
1833
1834        if self._list_len > self._list_capacity:
1835            # self._shared_memory.print_mem(self._pointer_to_internal_list, 200, 'before realloc. {}')
1836            # self.print_internal_list('before realloc. {}')
1837            self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list)
1838            # self._shared_memory.print_mem(self._pointer_to_internal_list, 200, 'after realloc. {}')
1839            # self.print_internal_list('after realloc. {}')
1840
1841        # self.print_internal_list('before inserting {}')
1842        self._list_len += 1
1843        # self.print_internal_list('before inserting but after +1 {}')
1844        for i in range(self._list_len - 1, index, -1):
1845            self._move_item(i - 1, i)
1846            # self._shared_memory.print_mem(self._pointer_to_internal_list, 200, f'after self._move_item({i - 1, i}). {{}}')
1847            # self.print_internal_list(f'after self._move_item({i - 1, i}). {{}}')
1848        
1849        self.__setitem__(index, item, need_to_free_item=False)
1850        # self._shared_memory.print_mem(self._pointer_to_internal_list, 200, 'after inserting. {}')
1851        # self.print_internal_list('after inserting. {}')
1852    
1853    def print_internal_list(self, text: str = None, additional_cells: int = 0):
1854        internal_list = self._shared_memory.read_mem(self._pointer_to_internal_list, 16 + 16 + self._list_len * 16 + additional_cells * 16)
1855        print('--- internal list -------------')
1856        if text:
1857            print(text.format(self._pointer_to_internal_list))
1858            print('------')
1859
1860        index = 0
1861        print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + 8])
1862        index += 8
1863        print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + 8])
1864        index += 8
1865        print('---')
1866        print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + 8])
1867        index += 8
1868        print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + 8])
1869        index += 8
1870        print('---')
1871        for i in range(self._list_len):
1872            print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + 8 * 2])
1873            index += 8 * 2
1874        
1875        if additional_cells:
1876            print('------')
1877            for i in range(additional_cells):
1878                print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + 8])
1879                index += 8 * 2
1880        print('-------------------------------')
1881        print()
1882
1883    def pop(self, index: int = -1) -> Any:
1884        if index < 0:
1885            index += len(self)
1886        if index < 0 or index >= len(self):
1887            raise IndexError
1888
1889        result = self.__getitem__(index)
1890        
1891        for i in range(index + 1, len(self)):
1892            self._move_item(i, i - 1)
1893        
1894        self._list_len -= 1
1895        return result
1896    
1897    def remove(self, obj: Any) -> None:
1898        obj_type = self._determine_obj_type(obj)
1899        obj_offset = self._determine_obj_offset(obj)
1900        found_in_index = None
1901        for i in range(len(self)):
1902            if self._compare_item_to_obj_fast(i, obj, obj_type, obj_offset):
1903                found_in_index = i
1904                break
1905        
1906        if found_in_index is None:
1907            raise ValueError
1908        else:
1909            self.__delitem__(found_in_index)
1910    
1911    def clear(self, need_to_free_item: bool = True) -> None:
1912        if need_to_free_item:
1913            for i in range(len(self)):
1914                self._free_item(i)
1915        
1916        self._list_len = 0
1917    
    def __iter__(self):
        """Return a forward iterator over the list's items."""
        return IListIterator(self)
1920    
    def __reversed__(self):
        """Return an iterator over the list's items in reverse order."""
        return IListReversedIterator(self)
1923    
1924    def __contains__(self, obj: Any) -> bool:
1925        obj_type = self._determine_obj_type(obj)
1926        obj_offset = self._determine_obj_offset(obj)
1927        found_in_index = None
1928        for i in range(len(self)):
1929            if self._compare_item_to_obj_fast(i, obj, obj_type, obj_offset):
1930                found_in_index = i
1931                break
1932        
1933        if found_in_index is None:
1934            return False
1935        else:
1936            return True
1937    
1938    def index(self, obj: Any, start: int = 0, stop: int = None) -> int:
1939        if stop is None:
1940            stop = len(self)
1941
1942        obj_type = self._determine_obj_type(obj)
1943        obj_offset = self._determine_obj_offset(obj)
1944        found_in_index = None
1945        for i in range(start, stop):
1946            if self._compare_item_to_obj_fast(i, obj, obj_type, obj_offset):
1947                found_in_index = i
1948                break
1949
1950        if found_in_index is None:
1951            raise ValueError
1952        else:
1953            return found_in_index
1954    
1955    def count(self, obj: Any) -> int:
1956        obj_type = self._determine_obj_type(obj)
1957        obj_offset = self._determine_obj_offset(obj)
1958        result = 0
1959        for i in range(len(self)):
1960            if self._compare_item_to_obj_fast(i, obj, obj_type, obj_offset):
1961                result += 1
1962
1963        return result
1964    
1965    def reverse(self) -> None:
1966        my_len = len(self)
1967        for i in range(my_len // 2):
1968            self._swap_items(i, my_len - i - 1)
1969    
    def sort(self, key: Any = None, reverse: bool = False) -> None:
        """In-place sorting is not implemented for shared-memory lists yet."""
        raise NotImplementedError
1972    
1973    def copy(self) -> 'IList':
1974        result = IList(self._shared_memory)
1975        result.extend(self)
1976        return result
1977    
1978    def __add__(self, other: Sequence) -> 'IList':
1979        result = IList(self._shared_memory)
1980        result.extend(self)
1981        result.extend(other)
1982        return result
1983    
    def __iadd__(self, other: Sequence) -> 'IList':
        """In-place concatenation (`self += other`)."""
        self.extend(other)
        return self
1987    
1988    def __mul__(self, other: int) -> 'IList':
1989        result = IList(self._shared_memory)
1990        for i in range(other):
1991            result.extend(self)
1992        
1993        return result
1994    
1995    def __imul__(self, other: int) -> 'IList':
1996        my_copy: IList = self.copy()
1997        for i in range(other):
1998            self.extend(my_copy)
1999        
2000        return self
2001    
    def __rmul__(self, other: int) -> 'IList':
        """Right-hand repetition (`n * self`), delegating to __mul__."""
        return self.__mul__(other)
2004    
2005    def __eq__(self, other: Sequence) -> bool:
2006        if len(self) != len(other):
2007            return False
2008        
2009        for i in range(len(self)):
2010            if self[i] != other[i]:
2011                return False
2012        
2013        return True
2014    
    def __ne__(self, other: Sequence) -> bool:
        """Inverse of __eq__."""
        return not self.__eq__(other)
2017    
    def __lt__(self, other: Sequence) -> bool:
        """True when EVERY item is strictly less than its counterpart in *other*.

        NOTE(review): this is element-wise, not Python's lexicographic
        sequence ordering; an empty list compares True, and a shorter
        *other* raises IndexError — confirm this contract is intended.
        """
        for i in range(len(self)):
            if self[i] >= other[i]:
                return False
        
        return True
2024    
    def __le__(self, other: Sequence) -> bool:
        """True when EVERY item is less than or equal to its counterpart in *other*.

        NOTE(review): element-wise, not lexicographic; a shorter *other*
        raises IndexError — confirm intended (see __lt__).
        """
        for i in range(len(self)):
            if self[i] > other[i]:
                return False
        
        return True
2031    
    def __gt__(self, other: Sequence) -> bool:
        """True when EVERY item is strictly greater than its counterpart in *other*.

        NOTE(review): element-wise, not lexicographic; a shorter *other*
        raises IndexError — confirm intended (see __lt__).
        """
        for i in range(len(self)):
            if self[i] <= other[i]:
                return False
        
        return True
2038    
    def __ge__(self, other: Sequence) -> bool:
        """True when EVERY item is greater than or equal to its counterpart in *other*.

        NOTE(review): element-wise, not lexicographic; a shorter *other*
        raises IndexError — confirm intended (see __lt__).
        """
        for i in range(len(self)):
            if self[i] < other[i]:
                return False
        
        return True
2045    
    def __repr__(self) -> str:
        """Materializes all items into a plain list for display."""
        return f'IList({list(self)})'
2048    
    def __str__(self) -> str:
        """Same representation as __repr__."""
        return f'IList({list(self)})'
2051    
    def __hash__(self) -> int:
        """Hash of the current contents (as a tuple).

        NOTE(review): the list is mutable, so the hash changes with the
        contents — unsafe as a dict key across mutations; confirm intended.
        """
        return hash(tuple(self))
2054    
2055    def __sizeof__(self) -> int:
2056        return 16 + read_uint64(self._base_address, self._offset + 8) + 16 + read_uint64(self._base_address, self._pointer_to_internal_list, 8)
2057    
    def export(self) -> list:
        """Materialize the shared-memory list as a plain Python list."""
        return list(self)
2060
2061    # def __del__(self) -> None:
2062    #     self._shared_memory.free(self._pointer_to_internal_list)
2063    #     self._shared_memory.free(self._offset)
2064
    def _free_mem(self):
        # Release all shared memory owned by this list. Idempotent at the
        # outer level: `_offset is None` marks an already-freed instance.
        if self._offset is not None:
            if self._pointer_to_internal_list is not None:
                self.clear()  # destroys every contained nested object first
                destroy_tinternal_list(self._shared_memory, self._pointer_to_internal_list)
                # NOTE(review): set to 0 rather than None, while the guard
                # above checks `is not None` — 0 would still read as
                # "present" if this branch could re-run; confirm intended.
                self._pointer_to_internal_list = 0
            
            self._shared_memory.free(self._offset)
            self._offset = None
2074
2075
2076# IList = IListTrue
2077
2078
class IListIterator:
    """Forward iterator over an IList (or any sized, indexable sequence)."""

    def __init__(self, ilist: 'IList') -> None:
        self._ilist = ilist
        self._index = 0

    def __iter__(self):
        return self

    def __next__(self):
        if self._index >= len(self._ilist):
            raise StopIteration

        current = self._ilist[self._index]
        self._index += 1
        return current
2095
2096
class IListReversedIterator:
    """Reverse iterator over an IList (or any sized, indexable sequence)."""

    def __init__(self, ilist: 'IList') -> None:
        self._ilist = ilist
        self._index = len(ilist) - 1

    def __iter__(self):
        return self

    def __next__(self):
        if self._index < 0:
            raise StopIteration

        current = self._ilist[self._index]
        self._index -= 1
        return current
2112
2113
class TList:
    # Codec adapter that (de)serializes Python lists to shared memory as IList.

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: list) -> Tuple[list, Offset, Size]:
        """Serialize *obj* into shared memory; returns (mapped IList, its offset, its size)."""
        obj = IList(shared_memory, obj=obj)
        return obj, obj._offset, obj._obj_size
    
    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> 'IList':
        """Attach to an existing tlist object at *offset*.

        Raises WrongObjectTypeError when the type tag at *offset* is not tlist.
        """
        if ObjectType.tlist != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError
        
        return IList(shared_memory, offset)
    
    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Free the tlist object at *offset* together with everything it contains."""
        if ObjectType.tlist != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError
        
        obj: IList = IList(shared_memory, offset)
        obj._free_mem()
2131
2132
2133# ======================================================================================================================
2134# === Tuple ============================================================================================================
2135
2136
class TupleOffsets(IntEnum):
    # Byte offsets of a ttuple's fields relative to its data area.
    size = 0
2139
2140
class TupleFieldOffsets(IntEnum):
    # Byte offsets within a single ttuple element slot.
    item_offset = 0
2143
2144
class TTuple:
    """Codec for Python tuples: every element is stored as a nested shared-memory object.

    Layout (relative to the 16-byte object header): an 8-byte element count,
    then one 8-byte element offset per item.
    """

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: tuple) -> Tuple[tuple, Offset, Size]:
        """Serialize *obj*; on any failure roll back the block and all children, then re-raise."""
        offset, real_size = shared_memory.malloc(ObjectType.ttuple, 8 + len(obj) * 8)
        created_items_offsets: List[Offset] = list()
        try:
            # BUGFIX: removed leftover debugging instrumentation that armed
            # `shared_memory.offset_to_be_monitored` when obj equaled the
            # hard-coded literal (1, [2, 3]).
            write_uint64(shared_memory.base_address, offset + 16 + 0, len(obj))
            for i, item in enumerate(obj):
                item_mapped_obj, item_offset, item_size = shared_memory.put_obj(item)
                created_items_offsets.append(item_offset)
                write_uint64(shared_memory.base_address, offset + 16 + 8 + i * 8, item_offset)
        except BaseException:
            # Roll back: free the tuple block, then destroy children already stored.
            shared_memory.free(offset)
            for item_offset in created_items_offsets:
                shared_memory.destroy_obj(item_offset)
            
            raise
        
        return obj, offset, real_size
    
    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> tuple:
        """Rebuild the Python tuple stored at *offset*.

        Raises WrongObjectTypeError when the type tag at *offset* is not ttuple.
        """
        if ObjectType.ttuple != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        result_list = list()
        size = read_uint64(shared_memory.base_address, offset + 16 + 0)
        for i in range(size):
            item_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 + i * 8)
            result_list.append(shared_memory.get_obj(item_offset))
        
        return tuple(result_list)
    
    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Destroy every element object, then free the tuple block itself."""
        if ObjectType.ttuple != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        size = read_uint64(shared_memory.base_address, offset + 16 + 0)
        for i in range(size):
            item_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 + i * 8)
            shared_memory.destroy_obj(item_offset)
        
        shared_memory.free(offset)
2189
2190
2191# ======================================================================================================================
2192# === DatetimeTypes =============================================================================================================
2193
2194
class DatetimeOffsets(IntEnum):
    """uint64 slot index inside a TDatetime record's data area."""
    # Offset (in 8-byte slots) of the child object holding the pickled bytes.
    data_bytes_offset = 0
2197
2198
# All datetime-family types handled by the TDatetime codec (each is stored as
# a single pickled blob — see TDatetime below).
DatetimeTypes = Union[datetime, timedelta, timezone, date, time]
2200
2201
class TDatetime:
    """Codec for datetime-family objects (datetime, timedelta, timezone, date, time).

    The object is pickled; the resulting bytes are stored as a child object and
    the record body holds one uint64 slot with that child's offset.
    """

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: DatetimeTypes) -> Tuple[DatetimeTypes, Offset, Size]:
        """Serialize ``obj``; returns (reconstructed object, record offset, allocated size)."""
        offset, real_size = shared_memory.malloc(ObjectType.tdatetime, 8)
        allocated_children: List[Offset] = list()
        try:
            mapped_blob, blob_offset, _ = shared_memory.put_obj(pickle_dumps(obj))
            allocated_children.append(blob_offset)
            write_uint64(shared_memory.base_address, offset + 16 + 0, blob_offset)
        except:
            # Roll back: release the record and any child created before the failure.
            shared_memory.free(offset)
            for child_offset in allocated_children:
                shared_memory.destroy_obj(child_offset)

            raise

        return pickle_loads(mapped_blob), offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> DatetimeTypes:
        """Rebuild the object stored at ``offset``; raises WrongObjectTypeError on tag mismatch."""
        if ObjectType.tdatetime != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        blob_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
        return pickle_loads(shared_memory.get_obj(blob_offset))

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Release the record at ``offset`` together with its pickled payload."""
        if ObjectType.tdatetime != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        blob_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
        shared_memory.destroy_obj(blob_offset)
        shared_memory.free(offset)
2234
2235
2236# ======================================================================================================================
2237# === Decimal =============================================================================================================
2238
2239
class DecimalOffsets(IntEnum):
    """uint64 slot index inside a TDecimal record's data area."""
    # Offset of the child tuple holding the Decimal's as_tuple() triple.
    data_tuple_offset = 0
2242
2243
class TDecimal:
    """Codec for ``decimal.Decimal`` values.

    The value is reduced to its ``as_tuple()`` triple (sign, digits, exponent),
    stored as a child tuple object; the record body is one uint64 slot holding
    that child's offset.
    """

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: Decimal) -> Tuple[Decimal, Offset, Size]:
        """Serialize ``obj``; returns (reconstructed Decimal, record offset, allocated size)."""
        offset, real_size = shared_memory.malloc(ObjectType.tdecimal, 8)
        allocated_children: List[Offset] = list()
        try:
            mapped_triple, triple_offset, _ = shared_memory.put_obj(tuple(obj.as_tuple()))
            allocated_children.append(triple_offset)
            write_uint64(shared_memory.base_address, offset + 16 + 0, triple_offset)
        except:
            # Roll back: release the record and any child created before the failure.
            shared_memory.free(offset)
            for child_offset in allocated_children:
                shared_memory.destroy_obj(child_offset)

            raise

        return Decimal(mapped_triple), offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> Decimal:
        """Rebuild the Decimal stored at ``offset``; raises WrongObjectTypeError on tag mismatch."""
        if ObjectType.tdecimal != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        triple_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
        return Decimal(shared_memory.get_obj(triple_offset))

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Release the record at ``offset`` together with its stored triple."""
        if ObjectType.tdecimal != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        triple_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
        shared_memory.destroy_obj(triple_offset)
        shared_memory.free(offset)
2276
2277
2278# ======================================================================================================================
2279# === Slice =============================================================================================================
2280
2281
class SliceOffsets(IntEnum):
    """uint64 slot index inside a TSlice record's data area."""
    # Offset of the child tuple holding (start, stop, step).
    data_tuple_offset = 0
2284
2285
class TSlice:
    """Codec for ``slice`` objects.

    The (start, stop, step) triple is stored as a child tuple object; the
    record body is one uint64 slot holding that child's offset.
    """

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: slice) -> Tuple[slice, Offset, Size]:
        """Serialize ``obj``; returns (reconstructed slice, record offset, allocated size).

        Bug fix: the original called ``tuple(obj.start, obj.stop, obj.step)``,
        which always raised TypeError — ``tuple()`` accepts at most one
        (iterable) argument. The triple is now built with a tuple literal.
        """
        offset, real_size = shared_memory.malloc(ObjectType.tslice, 8)
        created_items_offsets: List[Offset] = list()
        try:
            data_tuple_mapped_obj, data_tuple_offset, data_tuple_size = shared_memory.put_obj((obj.start, obj.stop, obj.step))
            created_items_offsets.append(data_tuple_offset)
            write_uint64(shared_memory.base_address, offset + 16 + 0, data_tuple_offset)
        except BaseException:
            # Roll back: release the record and any child created before the failure.
            shared_memory.free(offset)
            for item_offset in created_items_offsets:
                shared_memory.destroy_obj(item_offset)

            raise

        return slice(*data_tuple_mapped_obj), offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> slice:
        """Rebuild the slice stored at ``offset``; raises WrongObjectTypeError on tag mismatch."""
        if ObjectType.tslice != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        data_tuple_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
        result_tuple = shared_memory.get_obj(data_tuple_offset)
        return slice(*result_tuple)

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Release the record at ``offset`` together with its stored triple."""
        if ObjectType.tslice != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        data_tuple_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
        shared_memory.destroy_obj(data_tuple_offset)
        shared_memory.free(offset)
2318
2319
2320# ======================================================================================================================
2321# === Complex =============================================================================================================
2322
2323
class ComplexOffsets(IntEnum):
    """uint64 slot index inside a TComplex record's data area."""
    # Offset of the child tuple holding (real, imag).
    data_tuple_offset = 0
2326
2327
class TComplex:
    """Codec for ``complex`` numbers.

    The (real, imag) pair is stored as a child tuple object; the record body
    is one uint64 slot holding that child's offset.

    NOTE(review): all three methods tag/check the record with
    ``ObjectType.tfastset`` — the same tag TFastSet uses — which looks like a
    copy-paste remnant. Left unchanged here because no ``ObjectType.tcomplex``
    member is visible from this chunk; confirm against the ObjectType enum and
    the codec dispatch table.
    """

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: complex) -> Tuple[complex, Offset, Size]:
        """Serialize ``obj``; returns (reconstructed complex, record offset, allocated size).

        Bug fix: the original called ``tuple(obj.real, obj.imag)``, which
        always raised TypeError — ``tuple()`` accepts at most one (iterable)
        argument. The pair is now built with a tuple literal.
        """
        offset, real_size = shared_memory.malloc(ObjectType.tfastset, 8)
        created_items_offsets: List[Offset] = list()
        try:
            data_tuple_mapped_obj, data_tuple_offset, data_tuple_size = shared_memory.put_obj((obj.real, obj.imag))
            created_items_offsets.append(data_tuple_offset)
            write_uint64(shared_memory.base_address, offset + 16 + 0, data_tuple_offset)
        except BaseException:
            # Roll back: release the record and any child created before the failure.
            shared_memory.free(offset)
            for item_offset in created_items_offsets:
                shared_memory.destroy_obj(item_offset)

            raise

        return complex(real=data_tuple_mapped_obj[0], imag=data_tuple_mapped_obj[1]), offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> complex:
        """Rebuild the complex stored at ``offset``; raises WrongObjectTypeError on tag mismatch."""
        if ObjectType.tfastset != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        data_tuple_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
        result_tuple = shared_memory.get_obj(data_tuple_offset)
        return complex(real=result_tuple[0], imag=result_tuple[1])

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Release the record at ``offset`` together with its stored pair."""
        if ObjectType.tfastset != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        data_tuple_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
        shared_memory.destroy_obj(data_tuple_offset)
        shared_memory.free(offset)
2360
2361
2362# ======================================================================================================================
2363# === FastSet =============================================================================================================
2364
2365
class FastLimitedSet(set):
    """Marker subclass of ``set``.

    Presumably used to request the "fast" whole-object serialization path
    (TFastSet) instead of the structural ISet codec — confirm against the
    codec dispatch table elsewhere in this module.
    """
    ...
2368
2369
class FastSetOffsets(IntEnum):
    """uint64 slot index inside a TFastSet record's data area."""
    # Offset of the child tuple holding the set's items.
    data_tuple_offset = 0
2372
2373
class TFastSet:
    """Codec for sets serialized wholesale (the "fast" path).

    The whole set is flattened into one tuple stored as a child object; the
    record body is one uint64 slot holding that child's offset. Cheap to write
    and read back, at the cost of no incremental updates.
    """

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: set) -> Tuple[set, Offset, Size]:
        """Serialize ``obj``; returns (reconstructed set, record offset, allocated size)."""
        offset, real_size = shared_memory.malloc(ObjectType.tfastset, 8)
        allocated_children: List[Offset] = list()
        try:
            mapped_items, items_offset, _ = shared_memory.put_obj(tuple(obj))
            allocated_children.append(items_offset)
            write_uint64(shared_memory.base_address, offset + 16 + 0, items_offset)
        except:
            # Roll back: release the record and any child created before the failure.
            shared_memory.free(offset)
            for child_offset in allocated_children:
                shared_memory.destroy_obj(child_offset)

            raise

        return set(mapped_items), offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> set:
        """Rebuild the set stored at ``offset``; raises WrongObjectTypeError on tag mismatch."""
        if ObjectType.tfastset != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        items_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
        return set(shared_memory.get_obj(items_offset))

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Release the record at ``offset`` together with its item tuple."""
        if ObjectType.tfastset != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        items_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
        shared_memory.destroy_obj(items_offset)
        shared_memory.free(offset)
2406
2407
2408# ======================================================================================================================
2409# === FastDict =============================================================================================================
2410
2411
class FastLimitedDict(dict):
    """Marker subclass of ``dict``.

    Presumably used to request the "fast" whole-object serialization path
    (TFastDict) instead of a structural codec — confirm against the codec
    dispatch table elsewhere in this module.
    """
    ...
2414
2415
class FastDictOffsets(IntEnum):
    """uint64 slot index inside a TFastDict record's data area."""
    # Offset of the child tuple holding the dict's (key, value) pairs.
    data_tuple_offset = 0
2418
2419
class TFastDict:
    """Codec for dicts serialized wholesale (the "fast" path).

    The whole dict is flattened into a tuple of (key, value) pairs stored as
    one child object; the record body is one uint64 slot holding that child's
    offset. Cheap to write and read back, at the cost of no incremental updates.
    """

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: dict) -> Tuple[dict, Offset, Size]:
        """Serialize ``obj``; returns (reconstructed dict, record offset, allocated size)."""
        offset, real_size = shared_memory.malloc(ObjectType.tfastdict, 8)
        allocated_children: List[Offset] = list()
        try:
            mapped_pairs, pairs_offset, _ = shared_memory.put_obj(tuple(obj.items()))
            allocated_children.append(pairs_offset)
            write_uint64(shared_memory.base_address, offset + 16 + 0, pairs_offset)
        except:
            # Roll back: release the record and any child created before the failure.
            shared_memory.free(offset)
            for child_offset in allocated_children:
                shared_memory.destroy_obj(child_offset)

            raise

        return dict(mapped_pairs), offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> dict:
        """Rebuild the dict stored at ``offset``; raises WrongObjectTypeError on tag mismatch."""
        if ObjectType.tfastdict != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        pairs_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
        return dict(shared_memory.get_obj(pairs_offset))

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Release the record at ``offset`` together with its pairs tuple."""
        if ObjectType.tfastdict != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        pairs_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
        shared_memory.destroy_obj(pairs_offset)
        shared_memory.free(offset)
2452
2453
2454# ======================================================================================================================
2455# === Set =============================================================================================================
2456
2457
class SetOffsets(IntEnum):
    """uint64 slot indices inside an ISet record's data area."""
    size = 0
    capacity = 1
    hashmap_offset = 2
2462
2463
class SetHashmapFieldTypes(IntEnum):
    """State of one ISet hashmap cell: empty, single inline object, or collision bucket."""
    tnone = 0
    tobj = 1
    tbucket = 2
2468
2469
class SetHashmapItemOffsets(IntEnum):
    """Relative slot indices within one 3-slot ISet hashmap cell."""
    field_type = 0
    field_hash = 1
    obj_or_bucket = 2
2474
2475
class SetBucketOffsets(IntEnum):
    """Relative slot indices within one 2-slot ISet collision-bucket entry."""
    field_hash = 0
    obj = 1
2479
2480
class ISet(BaseIObject, AbsSet):
    """Read-mostly set stored in shared memory as an open hash table.

    Record layout after the 16-byte object header (see SetOffsets):
        +0   size      (uint64)
        +8   capacity  (uint64)
        +16  hashmap offset (uint64) — offset of an IList used as the table.

    The hashmap IList holds 3 slots per cell (see SetHashmapItemOffsets):
    [field_type, item_hash, obj_or_bucket], where field_type follows
    SetHashmapFieldTypes: 0 = empty, 1 = single object stored inline,
    2 = collision bucket (an IList of flat [hash, obj] pairs).
    """

    def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: AbsSet = None) -> None:
        """Create a new shared set from ``obj`` (when ``offset`` is None) or
        attach to an already-stored record at ``offset``.
        """
        self._shared_memory = shared_memory
        self._base_address = shared_memory.base_address
        self._obj_size = None  # bytes allocated for the root record (create path only)
        self._offset: Offset = None
        self._offset__data: Offset = None
        self._offset__size_offset: Offset = None
        self._offset__capacity_offset: Offset = None
        self._offset__hashmap_offset: Offset = None
        self._load_factor = 0.75  # capacity = ceil(item count / load factor)
        self._hash_bits: int = None
        self._capacity: int = None
        self._size: int = None
        self.hashmap: IList = None
        self.hashmap_offset: Offset = None
        self.buckets: Dict[int, IList] = dict()  # item_info_index -> collision bucket

        if offset is None:
            # --- create path: allocate and populate a fresh record ---
            if obj is None:
                # obj = frozenset(set())
                data_len = 16  # default sizing when no source set is given
            else:
                data_len = len(obj)

            # NOTE(review): with obj=None, _size is set to 16 although the set
            # is empty, so __len__ would report 16 — confirm this is intended
            # (IMutableSet initializes _size to 0 instead).
            self._size: int = data_len
            self.hash_bits = 1
            self.capacity = int(ceil(data_len / self._load_factor))

            offset, self._obj_size = shared_memory.malloc(ObjectType.tset, 24)
            try:
                self._offset = offset
                offset__data = offset + 16  # data area starts after the object header
                self._offset__data = offset__data
                self._offset__size_offset: Offset = offset__data + 0
                self._offset__capacity_offset: Offset = offset__data + 8
                self._offset__hashmap_offset = offset__data + 16

                write_uint64(shared_memory.base_address, self._offset__size_offset, self._size)
                write_uint64(shared_memory.base_address, self._offset__capacity_offset, self.capacity)

                # The hash table itself is a shared-memory IList; 3 slots per cell.
                self.hashmap, hashmap_offset, _ = shared_memory.put_obj(list())
                self.hashmap = cast(IList, self.hashmap)
                self.hashmap_offset = hashmap_offset
                write_uint64(shared_memory.base_address, self._offset__hashmap_offset, hashmap_offset)
                hashmap_capacity = self.capacity * 3
                self.hashmap.set_capacity(hashmap_capacity)
                self.hashmap.extend_with(hashmap_capacity, 0)
                hash_bits: int = self.hash_bits
                if obj is not None:
                    for item in obj:
                        item_hash = hash(item)
                        # Cell index = low bits of the hash; 3 slots per cell.
                        item_info_index: int = mask_least_significant_bits(item_hash, hash_bits) * 3
                        field_type_index = item_info_index + 0
                        item_hash_index = item_info_index + 1
                        item_bucket_index = item_info_index + 2
                        field_type = self.hashmap[field_type_index]
                        if 0 == field_type:
                            # Empty cell: store the object inline.
                            self.hashmap[field_type_index] = 1
                            self.hashmap[item_hash_index] = item_hash
                            self.hashmap[item_bucket_index] = item
                        elif 1 == field_type:
                            # First collision: demote the inline object into a new
                            # bucket (flat [hash, obj] pairs), then append the new item.
                            bucket, bucket_offset, _ = shared_memory.put_obj(list())
                            bucket = cast(IList, bucket)
                            bucket.set_capacity(2)
                            bucket.extend_with(2, 0)
                            self.buckets[item_info_index] = bucket
                            self.hashmap.move_item_to_list(item_hash_index, bucket, 0)
                            self.hashmap.move_item_to_list(item_bucket_index, bucket, 1)
                            self.hashmap[field_type_index] = 2
                            self.hashmap[item_bucket_index] = bucket_offset
                            bucket.append(item_hash)
                            bucket.append(item)
                        elif 2 == field_type:
                            # Existing bucket: append another [hash, obj] pair.
                            bucket = self.buckets[item_info_index]
                            bucket.append(item_hash)
                            bucket.append(item)
                        else:
                            raise ValueError(f'Unknown SetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
            except:
                # Roll back every allocation made by this constructor.
                self._free_mem()
                raise
        else:
            # --- attach path: read an existing record's bookkeeping ---
            self._offset = offset
            offset__data = offset + 16
            self._offset__data = offset__data
            self._offset__size_offset: Offset = offset__data + 0
            self._offset__capacity_offset: Offset = offset__data + 8
            self._offset__hashmap_offset = offset__data + 16

            self._size = read_uint64(shared_memory.base_address, self._offset__size_offset)
            self.hash_bits = 1
            self.capacity = read_uint64(shared_memory.base_address, self._offset__capacity_offset)
            hashmap_offset = read_uint64(shared_memory.base_address, self._offset__hashmap_offset)
            
            self.hashmap_offset = hashmap_offset
            self.hashmap = IList(shared_memory, hashmap_offset)
            item_info_index: int = 0
            # for item_info_index in range(self.capacity):
            #     field_type_index = item_info_index * 3 + 0
            #     item_hash_index = item_info_index * 3 + 1
            #     item_bucket_index = item_info_index * 3 + 2
            #     field_type = self.hashmap[field_type_index]
            #     if 0 == field_type:
            #         continue
            #     elif 1 == field_type:
            #         continue
            #     elif 2 == field_type:
            #         bucket_offset = self.hashmap[item_bucket_index]
            #         self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
            #     else:
            #         raise ValueError(f'Unknown SetHashmapFieldTypes field type at {item_info_index=}: {field_type}')

            # Re-attach IList wrappers for every existing collision bucket.
            for item_info_index in range(0, self.capacity * 3, 3):
                field_type_index = item_info_index + 0
                item_hash_index = item_info_index + 1
                item_bucket_index = item_info_index + 2
                field_type = self.hashmap[field_type_index]
                if 0 == field_type:
                    continue
                elif 1 == field_type:
                    continue
                elif 2 == field_type:
                    bucket_offset = self.hashmap[item_bucket_index]
                    self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
                else:
                    raise ValueError(f'Unknown SetHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    def __len__(self) -> int:
        # Cached at construction; this structure is not mutated afterwards.
        return self._size
    
    def __iter__(self):
        return ISetIterator(self)
    
    def __contains__(self, obj: Any) -> bool:
        """Membership test: locate the cell by hash, then compare hash and value."""
        item_hash = hash(obj)
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * 3
        field_type_index = item_info_index + 0
        item_hash_index = item_info_index + 1
        item_bucket_index = item_info_index + 2
        field_type = self.hashmap[field_type_index]
        if 0 == field_type:
            return False
        elif 1 == field_type:
            return (item_hash == self.hashmap[item_hash_index]) and (obj == self.hashmap[item_bucket_index])
        elif 2 == field_type:
            bucket = self.buckets[item_info_index]
            # for sub_item_info_index in range(len(bucket)):
            # NOTE(review): this bound (len(bucket) * 2) disagrees with
            # ISetIterator, which stops at len(bucket) for the same flat
            # [hash, obj] pair layout — verify it does not index past the end.
            for sub_item_info_index in range(0, len(bucket) * 2, 2):
                sub_item_hash_index = sub_item_info_index + 0
                sub_item_obj_index = sub_item_info_index + 1
                if (item_hash == bucket[sub_item_hash_index]) and (obj == bucket[sub_item_obj_index]):
                    return True
            
            return False
        else:
            raise ValueError(f'Unknown SetHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    def __hash__(self):
        # Delegates to _hash(), presumably provided by BaseIObject — confirm.
        return self._hash()

    @property
    def hash_bits(self) -> int:
        """Number of low hash bits used to pick a hashmap cell."""
        return self._hash_bits

    @hash_bits.setter
    def hash_bits(self, value: int) -> None:
        # Capacity is always a power of two derived from the bit count.
        self._hash_bits = value
        self._capacity = 2 ** value
    
    @property
    def capacity(self) -> int:
        """Number of hashmap cells (always a power of two)."""
        return self._capacity

    @capacity.setter
    def capacity(self, value: int) -> None:
        # Grow-only: requests smaller than the current capacity are ignored.
        if value <= self._capacity:
            return
        
        if value <= 2:
            self.hash_bits = 1
        else:
            # Round up to the next power of two via the bit count.
            self.hash_bits = int(ceil(log2(value)))
    
    def __str__(self) -> str:
        return set(self).__str__()

    def __repr__(self) -> str:
        return set(self).__repr__()

    def _free_mem(self):
        """Release all shared memory owned by this set: buckets, hashmap, root record."""
        if self._offset is not None:
            for _, bucket in self.buckets.items():
                self._shared_memory.destroy_obj(bucket._offset)
            
            self.buckets.clear()
            if self.hashmap_offset is not None:
                self._shared_memory.destroy_obj(self.hashmap_offset)
                self.hashmap_offset = None

            self._shared_memory.free(self._offset)
            self._offset = None
2683
2684
class ISetIterator:
    """Iterator over an ISet: walks hashmap cells, descending into collision buckets.

    ``_index`` is the current hashmap cell; ``_sub_index`` is the current
    [hash, obj] pair inside that cell's bucket (used only for field_type == 2).
    """

    def __init__(self, iset: ISet) -> None:
        self._iset = iset
        self._index = 0      # hashmap cell index (0 .. capacity - 1)
        self._sub_index = 0  # pair index within the current collision bucket
    
    def __next__(self):
        # Advance until a stored object is found; the while/else raises
        # StopIteration once all cells are exhausted.
        while self._index < self._iset.capacity:
            item_info_index: int = self._index * 3
            field_type_index = item_info_index + 0
            item_hash_index = item_info_index + 1
            item_bucket_index = item_info_index + 2
            field_type = self._iset.hashmap[field_type_index]
            if 0 == field_type:
                # Empty cell — skip.
                self._index += 1
                continue
            elif 1 == field_type:
                # Single inline object: yield it and move to the next cell.
                result = self._iset.hashmap[item_bucket_index]
                self._index += 1
                break
            elif 2 == field_type:
                # Collision bucket: flat IList of [hash, obj] pairs.
                bucket = self._iset.buckets[item_info_index]
                sub_item_info_index = self._sub_index
                sub_item_hash_index = sub_item_info_index * 2 + 0
                sub_item_obj_index = sub_item_info_index * 2 + 1
                if (sub_item_info_index * 2) >= len(bucket):
                    # Bucket exhausted — reset pair cursor, move to next cell.
                    self._sub_index = 0
                    self._index += 1
                    continue

                result = bucket[sub_item_obj_index]
                self._sub_index += 1
                break
            else:
                raise ValueError(f'Unknown SetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
        else:
            raise StopIteration

        return result
    
    def __iter__(self):
        return self
2727
2728
class TSet:
    """Codec for sets backed by the structural, shared-memory ISet container."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: set) -> Tuple[AbsSet, Offset, Size]:
        """Build an ISet from ``obj``; returns (mapped set, record offset, allocated size)."""
        mapped: ISet = ISet(shared_memory, obj=obj)
        return mapped, mapped._offset, mapped._obj_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> ISet:
        """Attach to an existing ISet record; raises WrongObjectTypeError on tag mismatch."""
        if ObjectType.tset != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        return ISet(shared_memory, offset)

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Attach to the record at ``offset`` and release everything it owns."""
        if ObjectType.tset != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        ISet(shared_memory, offset)._free_mem()
2746
2747
2748# ======================================================================================================================
2749# === MutableSet =============================================================================================================
2750
2751
class MutableSetOffsets(IntEnum):
    """uint64 slot indices inside an IMutableSet record's data area."""
    size = 0
    capacity = 1
    hashmap_offset = 2
    # Incremented on structural changes so other attached views can re-sync.
    refresh_counter = 3
2757
2758
class MutableSetHashmapFieldTypes(IntEnum):
    """State of one IMutableSet hashmap cell: empty, single inline object, or collision bucket."""
    tnone = 0
    tobj = 1
    tbucket = 2
2763
2764
class MutableSetHashmapItemOffsets(IntEnum):
    """Relative slot indices within one 3-slot IMutableSet hashmap cell."""
    field_type = 0
    field_hash = 1
    obj_or_bucket = 2
2769
2770
class MutableSetBucketFieldTypes(IntEnum):
    """State of one IMutableSet bucket entry: empty slot or stored object."""
    tnone = 0
    tobj = 1
2774
2775
class MutableSetBucketOffsets(IntEnum):
    """Relative slot indices within one 3-slot IMutableSet bucket entry."""
    field_type = 0
    field_hash = 1
    obj = 2
2780
2781
2782class IMutableSet(BaseIObject, AbsMutableSet):
2783    def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: AbsMutableSet = None) -> None:
2784        self._shared_memory = shared_memory
2785        self._base_address = shared_memory.base_address
2786        self._obj_size = None
2787        self._offset: Offset = None
2788        self._offset__data: Offset = None
2789        self._offset__size_offset: Offset = None
2790        self._offset__capacity_offset: Offset = None
2791        self._offset__hashmap_offset: Offset = None
2792        self._offset__refresh_counter_offset: Offset = None
2793        self._load_factor = 0.75
2794        self._load_factor_2 = 0.5625
2795        self._hash_bits: int = None
2796        self._capacity: int = None
2797        self._min_capacity: int = None
2798        self._size: int = None
2799        self.hashmap: IList = None
2800        self._refresh_counter: int = 0
2801        self.hashmap_offset: Offset = None
2802        self.buckets: Dict[int, IList] = dict()
2803
2804        self.ignore_rehash: bool = True
2805
2806        if offset is None:
2807            if obj is None:
2808                # obj = frozenset(set())
2809                data_len = 16
2810            else:
2811                data_len = len(obj)
2812
2813            self._size = 0
2814            self.hash_bits = 1
2815            self.capacity = int(ceil(data_len / self._load_factor))
2816            self._min_capacity = int(ceil(self._capacity * self._load_factor_2))
2817
2818            offset, self._obj_size = shared_memory.malloc(ObjectType.tmutableset, 32)
2819            try:
2820                self._offset = offset
2821                offset__data = offset + 16
2822                self._offset__data = offset__data
2823                self._offset__size_offset: Offset = offset__data + 0
2824                self._offset__capacity_offset: Offset = offset__data + 8
2825                self._offset__hashmap_offset = offset__data + 16
2826                self._offset__refresh_counter_offset = offset__data + 24
2827
2828                write_uint64(shared_memory.base_address, self._offset__size_offset, self._size)
2829                write_uint64(shared_memory.base_address, self._offset__capacity_offset, self.capacity)
2830                write_uint64(shared_memory.base_address, self._offset__refresh_counter_offset, self._refresh_counter)
2831
2832                self.hashmap, hashmap_offset, _ = shared_memory.put_obj(list())
2833                self.hashmap = cast(IList, self.hashmap)
2834                self.hashmap_offset = hashmap_offset
2835                write_uint64(shared_memory.base_address, self._offset__hashmap_offset, hashmap_offset)
2836                hashmap_capacity = self.capacity * 3
2837                self.hashmap.set_capacity(hashmap_capacity)
2838                self.hashmap.extend_with(hashmap_capacity, 0)
2839                hash_bits: int = self.hash_bits
2840                if obj is None:
2841                    pass
2842                elif isinstance(obj, IMutableSet):
2843                    self._move_from(obj)
2844                else:
2845                    for item in obj:
2846                        self.add(item)
2847                
2848                self._refresh_counter = read_uint64(shared_memory.base_address, self._offset__refresh_counter_offset)
2849                
2850                self.ignore_rehash = False
2851            except:
2852                self._free_mem()
2853                raise
2854        else:
2855            self._refresh_hashmap(offset)
2856            self.ignore_rehash = False
2857
2858            # self._offset = offset
2859            # offset__data = offset + 16
2860            # self._offset__data = offset__data
2861            # self._offset__size_offset: Offset = offset__data + 0
2862            # self._offset__capacity_offset: Offset = offset__data + 8
2863            # self._offset__hashmap_offset = offset__data + 16
2864
2865            # self._size = read_uint64(shared_memory.base_address, self._offset__size_offset)
2866            # self.hash_bits = 1
2867            # self.capacity = read_uint64(shared_memory.base_address, self._offset__capacity_offset)
2868            # hashmap_offset = read_uint64(shared_memory.base_address, self._offset__hashmap_offset)
2869            # self._min_capacity = int(ceil(self._capacity * self._load_factor_2))
2870            
2871            # self.hashmap_offset = hashmap_offset
2872            # self.hashmap = IList(shared_memory, hashmap_offset)
2873            # item_info_index: int = 0
2874            # # for item_info_index in range(self.capacity):
2875            # #     field_type_index = item_info_index * 3 + 0
2876            # #     item_hash_index = item_info_index * 3 + 1
2877            # #     item_bucket_index = item_info_index * 3 + 2
2878            # #     field_type = self.hashmap[field_type_index]
2879            # #     if 0 == field_type:
2880            # #         continue
2881            # #     elif 1 == field_type:
2882            # #         continue
2883            # #     elif 2 == field_type:
2884            # #         bucket_offset = self.hashmap[item_bucket_index]
2885            # #         self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
2886            # #     else:
2887            # #         raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
2888
2889            # for item_info_index in range(0, self.capacity * 3, 3):
2890            #     field_type_index = item_info_index + 0
2891            #     item_hash_index = item_info_index + 1
2892            #     item_bucket_index = item_info_index + 2
2893            #     field_type = self.hashmap[field_type_index]
2894            #     if 0 == field_type:
2895            #         continue
2896            #     elif 1 == field_type:
2897            #         continue
2898            #     elif 2 == field_type:
2899            #         bucket_offset = self.hashmap[item_bucket_index]
2900            #         self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
2901            #     else:
2902            #         raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
2903
2904            # self.ignore_rehash = False
2905
2906    def _refresh_hashmap(self, offset: Offset):
2907        # ignore_rehash = self.ignore_rehash
2908        # self.ignore_rehash = True
2909
2910        self._hash_bits = None
2911        self._capacity = None
2912        self._min_capacity = None
2913        self._size = None
2914        self.hashmap = None
2915        self._refresh_counter = 0
2916        self.hashmap_offset = None
2917        self.buckets = dict()
2918
2919        shared_memory = self._shared_memory
2920        self._offset = offset
2921        offset__data = offset + 16
2922        self._offset__data = offset__data
2923        self._offset__size_offset: Offset = offset__data + 0
2924        self._offset__capacity_offset: Offset = offset__data + 8
2925        self._offset__hashmap_offset = offset__data + 16
2926        self._offset__refresh_counter_offset = offset__data + 24
2927
2928        self._refresh_counter = read_uint64(shared_memory.base_address, self._offset__refresh_counter_offset)
2929        self._size = read_uint64(shared_memory.base_address, self._offset__size_offset)
2930        self.hash_bits = 1
2931        self.capacity = read_uint64(shared_memory.base_address, self._offset__capacity_offset)
2932        hashmap_offset = read_uint64(shared_memory.base_address, self._offset__hashmap_offset)
2933        self._min_capacity = int(ceil(self._capacity * self._load_factor_2))
2934        
2935        self.hashmap_offset = hashmap_offset
2936        self.hashmap = IList(shared_memory, hashmap_offset)
2937        item_info_index: int = 0
2938        # for item_info_index in range(self.capacity):
2939        #     field_type_index = item_info_index * 3 + 0
2940        #     item_hash_index = item_info_index * 3 + 1
2941        #     item_bucket_index = item_info_index * 3 + 2
2942        #     field_type = self.hashmap[field_type_index]
2943        #     if 0 == field_type:
2944        #         continue
2945        #     elif 1 == field_type:
2946        #         continue
2947        #     elif 2 == field_type:
2948        #         bucket_offset = self.hashmap[item_bucket_index]
2949        #         self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
2950        #     else:
2951        #         raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
2952
2953        for item_info_index in range(0, self.capacity * 3, 3):
2954            field_type_index = item_info_index + 0
2955            item_hash_index = item_info_index + 1
2956            item_bucket_index = item_info_index + 2
2957            field_type = self.hashmap[field_type_index]
2958            if 0 == field_type:
2959                continue
2960            elif 1 == field_type:
2961                continue
2962            elif 2 == field_type:
2963                bucket_offset = self.hashmap[item_bucket_index]
2964                self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
2965            else:
2966                raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
2967
2968        # self.ignore_rehash = ignore_rehash
2969    
2970    @property
2971    def refresh_counter(self):
2972        return read_uint64(self._base_address, self._offset__refresh_counter_offset)
2973    
2974    def _increase_refresh_counter(self):
2975        if not self.ignore_rehash:
2976            self._refresh_counter += 1
2977            write_uint64(self._base_address, self._offset__refresh_counter_offset, self._refresh_counter)
2978    
2979    def _check_hashmap(self):
2980        if self.ignore_rehash:
2981            return False
2982        else:
2983            base_address = self._base_address
2984            refresh_counter = read_uint64(base_address, self._offset__refresh_counter_offset)
2985            # hashmap_offset = read_uint64(base_address, self._offset__hashmap_offset)
2986            # if (self._refresh_counter != refresh_counter) or (self.hashmap_offset != hashmap_offset) or (self._hashmap._offset != hashmap_offset):
2987            if self._refresh_counter != refresh_counter:
2988                self._refresh_hashmap(self._offset)
2989                return True
2990            
2991            return False
2992
2993    # @property
2994    # def hashmap(self) -> IList:
2995    #     if self.ignore_rehash:
2996    #         return self._hashmap
2997    #     else:
2998    #         hashmap_offset = read_uint64(self._base_address, self._offset__hashmap_offset)
2999    #         if (self.hashmap_offset != hashmap_offset) or (self._hashmap._offset != hashmap_offset):
3000    #             self._refresh_hashmap(self._offset)
3001            
3002    #         return self._hashmap
3003    
3004    # @hashmap.setter
3005    # def hashmap(self, value: IList):
3006    #     self._hashmap = value
3007
3008    def _increase_size(self):
3009        self._size += 1
3010        write_uint64(self._base_address, self._offset__size_offset, self._size)
3011        if (self._size > self._capacity) or (self._size < self._min_capacity):
3012            self._rehash()
3013    
3014    def _decrease_size(self):
3015        self._size -= 1
3016        if self._size < 0:
3017            raise RuntimeError('Size of the set is negative')
3018
3019        write_uint64(self._base_address, self._offset__size_offset, self._size)
3020        if (self._size > self._capacity) or (self._size < self._min_capacity):
3021            self._rehash()
3022    
3023    def _move_from(self, other: 'IMutableSet'):
3024        for value_hash, value_type, value_offset in other.iter_offset_pop():
3025            self.add_as_offset(value_hash, value_type, value_offset)
3026    
    def _rehash(self):
        """Resize the hashmap to fit the current number of items.

        Serializes `self` into a brand-new set via `put_obj(self)` (whose
        constructor sizes the hashmap from the item count), then swaps ALL
        bookkeeping — both the cached Python-side fields AND the raw uint64
        header words in shared memory — between `self` and the new set.
        `self` thereby adopts the freshly sized storage; the temporary set
        ends up owning the old storage and is destroyed. The refresh counter
        is bumped first so other holders re-read the hashmap; the
        commented-out lines show the counter itself is excluded from the
        swap.
        """
        if self.ignore_rehash:
            return
        
        # Tell every other holder of this set to re-read its hashmap.
        self._increase_refresh_counter()

        # Block nested rehashes while items are being moved around.
        ignore_rehash = self.ignore_rehash
        self.ignore_rehash = True

        new_other, new_other_offset, new_other_size = self._shared_memory.put_obj(self)
        new_other = cast(IMutableSet, new_other)

        # Snapshot the new set's state: cached fields and raw header words.
        other_capacity = new_other._capacity
        other_hash_bits = new_other._hash_bits
        other_min_capacity = new_other._min_capacity
        other_size = new_other._size
        # other_refresh_counter = new_other._refresh_counter
        other_hashmap = new_other.hashmap
        other_hashmap_offset = new_other.hashmap_offset
        other_buckets = new_other.buckets
        other_hashmap_offset_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__hashmap_offset)
        other_size_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__size_offset)
        other_capacity_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__capacity_offset)
        # other_refresh_counter_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__refresh_counter_offset)
        
        # Hand the OLD storage over to the temporary set ...
        new_other._capacity = self._capacity
        new_other._hash_bits = self._hash_bits
        new_other._min_capacity = self._min_capacity
        new_other._size = self._size
        # new_other._refresh_counter = self._refresh_counter
        new_other.hashmap = self.hashmap
        new_other.hashmap_offset = self.hashmap_offset
        new_other.buckets = self.buckets
        write_uint64(new_other._shared_memory.base_address, new_other._offset__hashmap_offset, read_uint64(self._base_address, self._offset__hashmap_offset))
        write_uint64(new_other._shared_memory.base_address, new_other._offset__size_offset, read_uint64(self._base_address, self._offset__size_offset))
        write_uint64(new_other._shared_memory.base_address, new_other._offset__capacity_offset, read_uint64(self._base_address, self._offset__capacity_offset))
        # write_uint64(new_other._shared_memory.base_address, new_other._offset__refresh_counter_offset, read_uint64(self._base_address, self._offset__refresh_counter_offset))

        # ... and adopt the new, properly sized storage ourselves.
        self._capacity = other_capacity
        self._hash_bits = other_hash_bits
        self._min_capacity = other_min_capacity
        self._size = other_size
        # self._refresh_counter = other_refresh_counter
        self.hashmap = other_hashmap
        self.hashmap_offset = other_hashmap_offset
        self.buckets = other_buckets
        write_uint64(self._base_address, self._offset__hashmap_offset, other_hashmap_offset_bin)
        write_uint64(self._base_address, self._offset__size_offset, other_size_bin)
        write_uint64(self._base_address, self._offset__capacity_offset, other_capacity_bin)
        # write_uint64(self._base_address, self._offset__refresh_counter_offset, other_refresh_counter_bin)

        # The temporary set now references the OLD hashmap/buckets —
        # destroying it frees them.
        self._shared_memory.destroy_obj(new_other_offset)

        self.ignore_rehash = ignore_rehash
3081
3082    def __len__(self):
3083        self._check_hashmap()
3084        return self._size
3085    
3086    def __iter__(self):
3087        self._check_hashmap()
3088        return IMutableSetIterator(self)
3089    
3090    def iter_offset(self):
3091        self._check_hashmap()
3092        return IMutableSetIteratorAsOffset(self)
3093    
3094    def iter_offset_pop(self):
3095        self._check_hashmap()
3096        return IMutableSetIteratorAsOffset(self, True)
3097    
    def __contains__(self, obj: Any) -> bool:
        """Membership test.

        The slot is picked from the low `hash_bits` bits of hash(obj); each
        slot is a (field_type, item_hash, item_or_bucket) triple where
        field_type 0 = empty, 1 = inline item, 2 = collision bucket.
        """
        self._check_hashmap()
        item_hash = hash(obj)
        # * 3 converts the slot number into a flat index within the hashmap IList.
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * 3
        field_type_index = item_info_index + 0
        item_hash_index = item_info_index + 1
        item_bucket_index = item_info_index + 2
        field_type = self.hashmap[field_type_index]
        if 0 == field_type:
            # Empty slot.
            return False
        elif 1 == field_type:
            # Inline item: compare the cheap hash first, then the object.
            return (item_hash == self.hashmap[item_hash_index]) and (obj == self.hashmap[item_bucket_index])
        elif 2 == field_type:
            # Collision bucket: the slot stores the bucket's offset.
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    raise KeyError
            except KeyError:
                # NOTE(review): the bare `raise` makes the rebuild below
                # unreachable, so a missing/stale cached bucket escalates as
                # KeyError instead of being re-read from shared memory —
                # confirm whether this is intentional.
                raise
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            # Bucket items are (field_type, hash, obj) triples; 0 marks a hole.
            for bucket_item_index in range(0, len(bucket), 3):
                bucket_field_type = bucket[bucket_item_index + 0]
                if 0 == bucket_field_type:
                    continue

                bucket_field_hash = bucket[bucket_item_index + 1]
                bucket_obj = bucket[bucket_item_index + 2]
                if (item_hash == bucket_field_hash) and (obj == bucket_obj):
                    return True
            
            return False
        else:
            raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
3133
    def add(self, value):
        """Add an element.

        Slot states: 0 = empty (store inline), 1 = inline item (either a
        duplicate, or a collision that promotes the slot to a bucket),
        2 = bucket (scan for duplicate, then reuse a hole or append).
        """
        self._check_hashmap()
        item = value
        item_hash = hash(item)
        # Slot = low `hash_bits` bits of the hash; * 3 gives the flat index
        # of the slot's (field_type, item_hash, item_or_bucket) triple.
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * 3
        field_type_index = item_info_index + 0
        item_hash_index = item_info_index + 1
        item_bucket_index = item_info_index + 2
        field_type = self.hashmap[field_type_index]
        if 0 == field_type:
            # Empty slot: store the item inline.
            self.hashmap[field_type_index] = 1
            self.hashmap[item_hash_index] = item_hash
            self.hashmap[item_bucket_index] = item
            self._increase_size()
            return
        elif 1 == field_type:
            if (item_hash == self.hashmap[item_hash_index]) and (item == self.hashmap[item_bucket_index]):
                # Same item — already present.
                return
            
            # Collision: promote the slot to a bucket holding both the old
            # inline item and the new one; other holders must re-read.
            self._increase_refresh_counter()
            bucket, bucket_offset, _ = self._shared_memory.put_obj(list())
            bucket = cast(IList, bucket)
            bucket.set_capacity(3)
            bucket.extend_with(3, 0)
            self.buckets[item_info_index] = bucket
            bucket[0] = 1
            # Move the existing inline item into bucket slots 1 and 2 without
            # re-serializing it.
            self.hashmap.move_item_to_list(item_hash_index, bucket, 1)
            self.hashmap.move_item_to_list(item_bucket_index, bucket, 2)
            self.hashmap[field_type_index] = 2
            self.hashmap[item_bucket_index] = bucket_offset
            # Append the new item as a fresh (1, hash, obj) triple.
            bucket.append(1)
            bucket.append(item_hash)
            bucket.append(item)
            self._increase_size()
            return
        elif 2 == field_type:
            # Slot already holds a bucket.
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    raise KeyError
            except KeyError:
                # NOTE(review): the bare `raise` makes the rebuild below
                # unreachable — a stale bucket cache escalates instead of
                # being re-read; confirm this is intentional.
                raise
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            bucket_len: int = len(bucket)
            # First pass: bail out if the item is already present.
            for bucket_item_index in range(0, bucket_len, 3):
                bucket_field_type = bucket[bucket_item_index + 0]
                if 1 == bucket_field_type:
                    if (item_hash == bucket[bucket_item_index + 1]) and (item == bucket[bucket_item_index + 2]):
                        return
            
            # Second pass: reuse the first hole; otherwise append a new triple.
            for bucket_item_index in range(0, bucket_len, 3):
                bucket_field_type = bucket[bucket_item_index + 0]
                if 0 == bucket_field_type:
                    bucket[bucket_item_index + 0] = 1
                    bucket[bucket_item_index + 1] = item_hash
                    bucket[bucket_item_index + 2] = item
                    self._increase_size()
                    return
            else:
                bucket.append(1)
                bucket.append(item_hash)
                bucket.append(item)
                self._increase_size()
                return
        else:
            raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
3203
    def add_as_offset(self, value_hash, value_type, value_offset):
        """Add an element given as a raw (hash, type, offset) triple.

        Mirrors `add`, but never deserializes the value: it travels as a
        (value_type, value_offset) pair through the `*_as_offset` IList
        accessors.
        """
        self._check_hashmap()
        item = (value_type, value_offset)
        item_hash = value_hash
        # Slot = low `hash_bits` bits of the hash; * 3 gives the flat index
        # of the slot's (field_type, item_hash, item_or_bucket) triple.
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * 3
        field_type_index = item_info_index + 0
        item_hash_index = item_info_index + 1
        item_bucket_index = item_info_index + 2
        field_type = self.hashmap[field_type_index]
        if 0 == field_type:
            # Empty slot: store the raw reference inline.
            self.hashmap[field_type_index] = 1
            self.hashmap[item_hash_index] = item_hash
            self.hashmap.setitem_as_offset(item_bucket_index, item)
            self._increase_size()
            return
        elif 1 == field_type:
            if (item_hash == self.hashmap[item_hash_index]) and (item == self.hashmap.getitem_as_offset(item_bucket_index)):
                # Same raw reference — already present.
                return
            
            # Collision: promote the slot to a bucket holding both entries.
            self._increase_refresh_counter()
            bucket, bucket_offset, _ = self._shared_memory.put_obj(list())
            bucket = cast(IList, bucket)
            bucket.set_capacity(3)
            bucket.extend_with(3, 0)
            self.buckets[item_info_index] = bucket
            bucket[0] = 1
            # Move the existing inline entry into bucket slots 1 and 2.
            self.hashmap.move_item_to_list(item_hash_index, bucket, 1)
            self.hashmap.move_item_to_list(item_bucket_index, bucket, 2)
            self.hashmap[field_type_index] = 2
            self.hashmap[item_bucket_index] = bucket_offset
            # Append the new entry as a (1, hash, raw-reference) triple.
            bucket.append(1)
            bucket.append(item_hash)
            bucket.append_as_offset(item)
            self._increase_size()
            return
        elif 2 == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    raise KeyError
            except KeyError:
                # NOTE(review): the bare `raise` makes the rebuild below
                # unreachable — a stale bucket cache escalates instead of
                # being re-read; confirm this is intentional.
                raise
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            bucket_len: int = len(bucket)
            # First pass: bail out if the entry is already present.
            for bucket_item_index in range(0, bucket_len, 3):
                bucket_field_type = bucket[bucket_item_index + 0]
                if 1 == bucket_field_type:
                    if (item_hash == bucket[bucket_item_index + 1]) and (item == bucket.getitem_as_offset(bucket_item_index + 2)):
                        return
            
            # Second pass: reuse the first hole; otherwise append a new triple.
            for bucket_item_index in range(0, bucket_len, 3):
                bucket_field_type = bucket[bucket_item_index + 0]
                if 0 == bucket_field_type:
                    bucket[bucket_item_index + 0] = 1
                    bucket[bucket_item_index + 1] = item_hash
                    bucket.setitem_as_offset(bucket_item_index + 2, item)
                    self._increase_size()
                    return
            else:
                bucket.append(1)
                bucket.append(item_hash)
                bucket.append_as_offset(item)
                self._increase_size()
                return
        else:
            raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
3273
    def discard(self, value):
        """Remove an element.  Do not raise an exception if absent."""
        self._check_hashmap()
        obj = value
        item_hash = hash(obj)
        # Slot = low `hash_bits` bits of the hash; * 3 gives the flat index
        # of the slot's (field_type, item_hash, item_or_bucket) triple.
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * 3
        field_type_index = item_info_index + 0
        item_hash_index = item_info_index + 1
        item_bucket_index = item_info_index + 2
        field_type = self.hashmap[field_type_index]
        if 0 == field_type:
            # Empty slot — nothing to remove.
            return
        elif 1 == field_type:
            if (item_hash == self.hashmap[item_hash_index]) and (obj == self.hashmap[item_bucket_index]):
                # Clear the inline slot back to the empty state.
                self.hashmap[field_type_index] = 0
                self.hashmap[item_hash_index] = None
                self.hashmap[item_bucket_index] = None
                self._decrease_size()
                return
            else:
                return
        elif 2 == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    raise KeyError
            except KeyError:
                # NOTE(review): the bare `raise` makes the rebuild below
                # unreachable — confirm this is intentional.
                raise
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            # Scan the bucket's (field_type, hash, obj) triples; clear the
            # matching entry to a hole (field_type 0). The bucket keeps its
            # length — holes are reused by `add`.
            for bucket_item_index in range(0, len(bucket), 3):
                bucket_field_type = bucket[bucket_item_index + 0]
                if 0 == bucket_field_type:
                    continue
                
                bucket_field_hash = bucket[bucket_item_index + 1]
                bucket_obj = bucket[bucket_item_index + 2]
                if (item_hash == bucket_field_hash) and (obj == bucket_obj):
                    bucket[bucket_item_index + 0] = 0
                    bucket[bucket_item_index + 1] = None
                    bucket[bucket_item_index + 2] = None
                    self._decrease_size()
                    return
            return
        else:
            raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
3321
3322    @property
3323    def hash_bits(self) -> int:
3324        return self._hash_bits
3325
3326    @hash_bits.setter
3327    def hash_bits(self, value: int) -> None:
3328        self._hash_bits = value
3329        self._capacity = 2 ** value
3330    
3331    @property
3332    def capacity(self) -> int:
3333        return self._capacity
3334
3335    @capacity.setter
3336    def capacity(self, value: int) -> None:
3337        if value <= self._capacity:
3338            return
3339        
3340        if value <= 2:
3341            self.hash_bits = 1
3342        else:
3343            self.hash_bits = int(ceil(log2(value)))
3344    
3345    def __str__(self) -> str:
3346        self._check_hashmap()
3347        return set(self).__str__()
3348
3349    def __repr__(self) -> str:
3350        self._check_hashmap()
3351        return set(self).__repr__()
3352
3353    def _free_mem(self):
3354        if self._offset is not None:
3355            for _, bucket in self.buckets.items():
3356                self._shared_memory.destroy_obj(bucket._offset)
3357            
3358            self.buckets.clear()
3359            if self.hashmap_offset is not None:
3360                self._shared_memory.destroy_obj(self.hashmap_offset)
3361                self.hashmap_offset = None
3362            
3363            self._shared_memory.free(self._offset)
3364            self._offset = None
3365
3366
class IMutableSetIterator:
    """Iterator over the deserialized objects stored in an IMutableSet.

    Walks the hashmap slot by slot (`_index`) and, inside a collision
    bucket, entry by entry (`_sub_index`). Raises RuntimeError if the
    set's hashmap is refreshed (rehashed by another holder) mid-iteration.
    """

    def __init__(self, iset: 'IMutableSet') -> None:
        self._iset = iset
        self._index = 0      # current hashmap slot number
        self._sub_index = 0  # current entry number inside the current bucket

    def __next__(self):
        if self._iset._check_hashmap():
            raise RuntimeError("Sets's hashmap changed during iteration")

        while self._index < self._iset.capacity:
            item_info_index: int = self._index * 3
            field_type_index = item_info_index + 0
            item_bucket_index = item_info_index + 2
            field_type = self._iset.hashmap[field_type_index]
            if 0 == field_type:
                # Empty slot.
                self._index += 1
                continue
            elif 1 == field_type:
                # Inline item: yield it and advance to the next slot.
                result = self._iset.hashmap[item_bucket_index]
                self._index += 1
                return result
            elif 2 == field_type:
                # Collision bucket: the slot stores the bucket's offset.
                bucket_offset = self._iset.hashmap[item_bucket_index]
                try:
                    bucket = self._iset.buckets[item_info_index]
                    if bucket._offset != bucket_offset:
                        raise KeyError
                except KeyError:
                    # NOTE(review): the bare `raise` makes the rebuild below
                    # unreachable — confirm this is intentional.
                    raise
                    self._iset.buckets[item_info_index] = bucket = IList(self._iset._shared_memory, bucket_offset)

                bucket_len = len(bucket)
                sub_item_info_index = self._sub_index
                while (sub_item_info_index * 3) < bucket_len:
                    sub_item_field_type_index = sub_item_info_index * 3 + 0
                    if bucket[sub_item_field_type_index] == 0:
                        # Hole left by a discard — skip it.
                        sub_item_info_index += 1
                        continue

                    sub_item_obj_index = sub_item_info_index * 3 + 2
                    result = bucket[sub_item_obj_index]
                    # BUGFIX: resume AFTER the entry just yielded. The old
                    # `self._sub_index += 1` ignored holes skipped above, so
                    # the next call could re-yield the same entry forever.
                    self._sub_index = sub_item_info_index + 1
                    return result
                else:
                    # Bucket exhausted: reset sub-cursor, go to next slot.
                    self._sub_index = 0
                    self._index += 1
                    continue
            else:
                raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
        else:
            raise StopIteration

    def __iter__(self):
        return self
3424
3425
class IMutableSetIteratorAsOffset:
    """Iterator yielding raw (hash, value_type, value_offset) triples from an
    IMutableSet without deserializing the stored objects.

    With `pop=True`, each yielded entry's slot is also cleared in place
    (the set's size counter is NOT adjusted here). Raises RuntimeError if
    the set's hashmap is refreshed mid-iteration.
    """

    def __init__(self, iset: 'IMutableSet', pop: bool = False) -> None:
        self._iset = iset
        self._pop: bool = pop
        self._index = 0      # current hashmap slot number
        self._sub_index = 0  # current entry number inside the current bucket

    def __next__(self):
        if self._iset._check_hashmap():
            raise RuntimeError("Set's hashmap changed during iteration")

        while self._index < self._iset.capacity:
            item_info_index: int = self._index * 3
            field_type_index = item_info_index + 0
            item_hash_index = item_info_index + 1
            item_bucket_index = item_info_index + 2
            field_type = self._iset.hashmap[field_type_index]
            if 0 == field_type:
                # Empty slot.
                self._index += 1
                continue
            elif 1 == field_type:
                # Inline entry: grab the raw (type, offset) pair.
                item_hash = self._iset.hashmap[item_hash_index]
                value_type, value_offset = self._iset.hashmap.getitem_as_offset(item_bucket_index)
                if self._pop:
                    # Clear the slot in place; (0, 0) with free=False keeps
                    # the referenced object alive for the caller.
                    self._iset.hashmap[field_type_index] = 0
                    self._iset.hashmap[item_hash_index] = None
                    self._iset.hashmap.setitem_as_offset(item_bucket_index, (0, 0), False)

                self._index += 1
                return (item_hash, value_type, value_offset)
            elif 2 == field_type:
                # Collision bucket: the slot stores the bucket's offset.
                bucket_offset = self._iset.hashmap[item_bucket_index]
                try:
                    bucket = self._iset.buckets[item_info_index]
                    if bucket._offset != bucket_offset:
                        raise KeyError
                except KeyError:
                    # NOTE(review): the bare `raise` makes the rebuild below
                    # unreachable — confirm this is intentional.
                    raise
                    self._iset.buckets[item_info_index] = bucket = IList(self._iset._shared_memory, bucket_offset)

                bucket_len = len(bucket)
                sub_item_info_index = self._sub_index
                while (sub_item_info_index * 3) < bucket_len:
                    sub_item_field_type_index = sub_item_info_index * 3 + 0
                    if bucket[sub_item_field_type_index] == 0:
                        # Hole left by a discard/pop — skip it.
                        sub_item_info_index += 1
                        continue

                    sub_item_hash_index = sub_item_info_index * 3 + 1
                    sub_item_obj_index = sub_item_info_index * 3 + 2
                    sub_item_hash = bucket[sub_item_hash_index]
                    sub_item_value_type, sub_item_value_offset = bucket.getitem_as_offset(sub_item_obj_index)
                    if self._pop:
                        bucket[sub_item_field_type_index] = 0
                        bucket[sub_item_hash_index] = None
                        bucket.setitem_as_offset(sub_item_obj_index, (0, 0), False)

                    # BUGFIX: resume AFTER the entry just yielded. The old
                    # `self._sub_index += 1` ignored skipped holes and, in
                    # non-pop mode, could re-yield the same entry forever.
                    self._sub_index = sub_item_info_index + 1
                    return (sub_item_hash, sub_item_value_type, sub_item_value_offset)
                else:
                    # Bucket exhausted: reset sub-cursor, go to next slot.
                    self._sub_index = 0
                    self._index += 1
                    continue
            else:
                raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
        else:
            raise StopIteration

    def __iter__(self):
        return self
3499
3500
class TMutableSet:
    """Codec for `set` objects stored in shared memory as IMutableSet instances."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: set) -> Tuple[IMutableSet, Offset, Size]:
        """Serialize `obj` into shared memory; return (wrapper, offset, size)."""
        mapped: IMutableSet = IMutableSet(shared_memory, obj=obj)
        return mapped, mapped._offset, mapped._obj_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> IMutableSet:
        """Attach to an already-stored set located at `offset`."""
        object_type = read_uint64(shared_memory.base_address, offset)
        if ObjectType.tmutableset != object_type:
            raise WrongObjectTypeError

        return IMutableSet(shared_memory, offset)

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Free the set stored at `offset`, including all of its internals."""
        object_type = read_uint64(shared_memory.base_address, offset)
        if ObjectType.tmutableset != object_type:
            raise WrongObjectTypeError

        IMutableSet(shared_memory, offset)._free_mem()
3518
3519
3520# ======================================================================================================================
3521# === Mapping =============================================================================================================
3522
3523
class ForceMapping(dict):
    # NOTE(review): empty marker subclass of dict; presumably used to force
    # the shared-memory mapping codec for an object — confirm against this
    # file's codec-selection logic.
    ...
3526
3527
# Convenience aliases for ForceMapping.
FMapping = ForceMapping
forcemapping = ForceMapping
fmapping = ForceMapping
3531
3532
class MappingOffsets(IntEnum):
    # Field order of the mapping's header values: size, capacity, and the
    # offset of its hashmap IList (matches the +0/+8/+16 uint64 layout used
    # in IMapping.__init__).
    size = 0
    capacity = 1
    hashmap_offset = 2
3537
3538
class MappingHashmapFieldTypes(IntEnum):
    # State of one mapping hashmap slot: empty, a single inline key/value,
    # or a collision bucket.
    tnone = 0
    tobj = 1
    tbucket = 2
3543
3544
class MappingHashmapItemOffsets(IntEnum):
    # Per-slot field layout inside the mapping's hashmap (4 entries per
    # slot — IMapping sizes its hashmap as capacity * 4).
    field_type = 0
    field_hash = 1
    key_or_bucket = 2
    value_or_none = 3
3550
3551
class MappingBucketOffsets(IntEnum):
    # Per-item field layout inside a mapping collision bucket (3 entries
    # per item).
    field_hash = 0
    key_obj = 1
    value_obj = 2
3556
3557
class IMapping(BaseIObject, AbsMapping):
    """Immutable mapping proxy backed by a ``SharedMemory`` buffer.

    Layout (8-byte words, relative to ``self._offset``):
        bytes 0..15 : allocator/object header written by ``malloc``
        +16 : size           -- number of key/value pairs
        +24 : capacity       -- number of hashmap slots
        +32 : hashmap offset -- offset of the backing ``IList``

    The hashmap ``IList`` holds ``capacity * 4`` items; every slot is a flat
    4-item record ``(field_type, key_hash, key_or_bucket, value_or_none)``
    (see ``MappingHashmapItemOffsets``).  Colliding keys spill into a
    per-slot bucket ``IList`` of flat ``(hash, key, value)`` triples
    (see ``MappingBucketOffsets``).
    """

    def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: AbsMapping = None) -> None:
        """Create a new shared mapping from ``obj`` (when ``offset`` is None)
        or adopt an existing one located at ``offset``.

        Parameters:
            shared_memory: owning shared memory arena.
            offset: offset of an already-serialized mapping to adopt, or None.
            obj: source mapping to serialize when constructing from scratch.
        """
        self._shared_memory = shared_memory
        self._base_address = shared_memory.base_address
        self._obj_size = None
        self._offset: Offset = None
        self._offset__data: Offset = None
        self._offset__size_offset: Offset = None
        self._offset__capacity_offset: Offset = None
        self._offset__hashmap_offset: Offset = None
        self._load_factor = 0.75  # capacity = ceil(len / load_factor)
        self._hash_bits: int = None
        self._capacity: int = None
        self._size: int = None
        self.hashmap: IList = None
        self.hashmap_offset: Offset = None
        self.buckets: Dict[int, IList] = dict()  # slot record index -> bucket IList

        if offset is None:
            # --- construction path: serialize ``obj`` into shared memory ---
            if obj is None:
                # obj = frozenset(set())
                data_len = 16  # default sizing when no source object is given
            else:
                data_len = len(obj)

            self._size: int = data_len
            self.hash_bits = 1  # resets _capacity to 2 so the capacity setter always grows below
            self.capacity = int(ceil(data_len / self._load_factor))

            # 16-byte header + 3 * 8-byte fields = 24 bytes of payload.
            offset, self._obj_size = shared_memory.malloc(ObjectType.tmapping, 24)
            try:
                self._offset = offset
                offset__data = offset + 16
                self._offset__data = offset__data
                self._offset__size_offset: Offset = offset__data + 0
                self._offset__capacity_offset: Offset = offset__data + 8
                self._offset__hashmap_offset = offset__data + 16

                write_uint64(shared_memory.base_address, self._offset__size_offset, self._size)
                write_uint64(shared_memory.base_address, self._offset__capacity_offset, self.capacity)

                self.hashmap, hashmap_offset, _ = shared_memory.put_obj(list())
                self.hashmap = cast(IList, self.hashmap)
                self.hashmap_offset = hashmap_offset
                write_uint64(shared_memory.base_address, self._offset__hashmap_offset, hashmap_offset)
                hashmap_capacity = self.capacity * 4  # 4 words per slot record
                self.hashmap.set_capacity(hashmap_capacity)
                self.hashmap.extend_with(hashmap_capacity, 0)  # zero-fill: every slot starts as tnone
                hash_bits: int = self.hash_bits
                if obj is not None:
                    for key, value in obj.items():
                        key_hash = hash(key)
                        # Slot index = low ``hash_bits`` bits of the hash; *4 for the flat record start.
                        item_info_index: int = mask_least_significant_bits(key_hash, hash_bits) * 4
                        field_type_index = item_info_index + 0
                        item_hash_index = item_info_index + 1
                        item_bucket_index = item_info_index + 2
                        item_value_index = item_info_index + 3
                        field_type = self.hashmap[field_type_index]
                        if 0 == field_type:
                            # Empty slot: store the entry inline.
                            self.hashmap[field_type_index] = 1
                            self.hashmap[item_hash_index] = key_hash
                            self.hashmap[item_bucket_index] = key
                            self.hashmap[item_value_index] = value
                        elif 1 == field_type:
                            # First collision: demote the inline entry into a new
                            # bucket, then append the new entry to the same bucket.
                            bucket, bucket_offset, _ = shared_memory.put_obj(list())
                            bucket = cast(IList, bucket)
                            bucket.set_capacity(3)
                            bucket.extend_with(3, 0)
                            self.buckets[item_info_index] = bucket
                            self.hashmap.move_item_to_list(item_hash_index, bucket, 0)
                            self.hashmap.move_item_to_list(item_bucket_index, bucket, 1)
                            self.hashmap.move_item_to_list(item_value_index, bucket, 2)
                            self.hashmap[field_type_index] = 2
                            self.hashmap[item_bucket_index] = bucket_offset
                            bucket.append(key_hash)
                            bucket.append(key)
                            bucket.append(value)
                        elif 2 == field_type:
                            # Subsequent collision: append to the existing bucket.
                            bucket = self.buckets[item_info_index]
                            bucket.append(key_hash)
                            bucket.append(key)
                            bucket.append(value)
                        else:
                            raise ValueError(f'Unknown MappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')

                # print(f'Constructed {self.hashmap=}')
                # print(f'\tConstructed buckets:')
                # pdi(self.buckets)
                # for bucket_index, bucket in self.buckets.items():
                #     pdi(bucket)
                #     print(f'\t\t{bucket_index}:', bucket)
            except:
                # Roll back every shared-memory allocation made so far.
                self._free_mem()
                raise
        else:
            # --- adoption path: attach to an existing serialized mapping ---
            self._offset = offset
            offset__data = offset + 16
            self._offset__data = offset__data
            self._offset__size_offset: Offset = offset__data + 0
            self._offset__capacity_offset: Offset = offset__data + 8
            self._offset__hashmap_offset = offset__data + 16

            self._size = read_uint64(shared_memory.base_address, self._offset__size_offset)
            self.hash_bits = 1  # reset _capacity so the setter accepts the stored value
            self.capacity = read_uint64(shared_memory.base_address, self._offset__capacity_offset)
            hashmap_offset = read_uint64(shared_memory.base_address, self._offset__hashmap_offset)
            
            self.hashmap_offset = hashmap_offset
            self.hashmap = IList(shared_memory, hashmap_offset)
            # print(f'Adopted by {type(self)}: {self.hashmap=}')
            item_info_index: int = 0
            # for item_info_index in range(self.capacity):
            #     field_type_index = item_info_index * 4 + 0
            #     item_hash_index = item_info_index * 4 + 1
            #     item_bucket_index = item_info_index * 4 + 2
            #     item_value_index = item_info_index * 4 + 3
            #     field_type = self.hashmap[field_type_index]
            #     if 0 == field_type:
            #         continue
            #     elif 1 == field_type:
            #         continue
            #     elif 2 == field_type:
            #         bucket_offset = self.hashmap[item_bucket_index]
            #         self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
            #     else:
            #         raise ValueError(f'Unknown MappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')

            # Re-adopt every collision bucket referenced by the hashmap.
            for item_info_index in range(0, self.capacity * 4, 4):
                field_type_index = item_info_index + 0
                item_hash_index = item_info_index + 1
                item_bucket_index = item_info_index + 2
                item_value_index = item_info_index + 3
                field_type = self.hashmap[field_type_index]
                if 0 == field_type:
                    continue
                elif 1 == field_type:
                    continue
                elif 2 == field_type:
                    bucket_offset = self.hashmap[item_bucket_index]
                    self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
                else:
                    raise ValueError(f'Unknown MappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')

            # print(f'\tAdopted buckets:')
            # pdi(self.buckets)
            # for bucket_index, bucket in self.buckets.items():
            #     pdi(bucket)
            #     print(f'\t\t{bucket_index}:', bucket)

    def __len__(self):
        """Return the number of key/value pairs stored."""
        return self._size
    
    def __iter__(self):
        """Iterate over the mapping's keys."""
        return IMappingIterator(self)
    
    # def __contains__(self, obj: Hashable) -> bool:
    #     item_hash = hash(obj)
    #     item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits)
    #     field_type_index = item_info_index * 4 + 0
    #     item_hash_index = item_info_index * 4 + 1
    #     item_bucket_index = item_info_index * 4 + 2
    #     item_value_index = item_info_index * 4 + 3
    #     field_type = self.hashmap[field_type_index]
    #     if 0 == field_type:
    #         return False
    #     elif 1 == field_type:
    #         return (item_hash == self.hashmap[item_hash_index]) and (obj == self.hashmap[item_bucket_index])
    #     elif 2 == field_type:
    #         bucket = self.buckets[item_info_index]
    #         # for sub_item_info_index in range(len(bucket)):
    #         for sub_item_info_index in range(0, len(bucket) * 3, 3):
    #             sub_item_hash_index = sub_item_info_index + 0
    #             sub_item_key_obj_index = sub_item_info_index + 1
    #             sub_item_value_obj_index = sub_item_info_index + 2
    #             if (item_hash == bucket[sub_item_hash_index]) and (obj == bucket[sub_item_key_obj_index]):
    #                 return True
            
    #         return False
    #     else:
    #         raise ValueError(f'Unknown MappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    def __getitem__(self, key: Hashable):
        """Return the value stored for ``key``; raise ``KeyError`` if absent."""
        item_hash = hash(key)
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * 4
        field_type_index = item_info_index + 0
        item_hash_index = item_info_index + 1
        item_bucket_index = item_info_index + 2
        item_value_index = item_info_index + 3
        field_type = self.hashmap[field_type_index]
        if 0 == field_type:
            raise KeyError
        elif 1 == field_type:
            # Inline entry: compare hash first (cheap), then the key itself.
            if (item_hash == self.hashmap[item_hash_index]) and (key == self.hashmap[item_bucket_index]):
                return self.hashmap[item_value_index]
            else:
                raise KeyError
        elif 2 == field_type:
            bucket = self.buckets[item_info_index]
            # for sub_item_info_index in range(len(bucket)):
            # NOTE(review): IMappingIterator treats len(bucket) as the FLAT item
            # count (it stops when sub_index * 3 >= len(bucket)); here the loop
            # bound is len(bucket) * 3, which looks like it over-runs the bucket
            # when the key is absent — confirm IList.__len__ semantics.
            for sub_item_info_index in range(0, len(bucket) * 3, 3):
                sub_item_hash_index = sub_item_info_index + 0
                sub_item_key_obj_index = sub_item_info_index + 1
                sub_item_value_obj_index = sub_item_info_index + 2
                if (item_hash == bucket[sub_item_hash_index]) and (key == bucket[sub_item_key_obj_index]):
                    return bucket[sub_item_value_obj_index]
            
            raise KeyError
        else:
            raise ValueError(f'Unknown MappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    @property
    def hash_bits(self) -> int:
        """Number of low hash bits used to select a hashmap slot."""
        return self._hash_bits

    @hash_bits.setter
    def hash_bits(self, value: int) -> None:
        # Capacity is always the power of two addressable by ``value`` bits.
        self._hash_bits = value
        self._capacity = 2 ** value
    
    @property
    def capacity(self) -> int:
        """Number of hashmap slots (always a power of two)."""
        return self._capacity

    @capacity.setter
    def capacity(self, value: int) -> None:
        # Capacity never shrinks; a larger request is rounded up to the next
        # power of two via the hash_bits setter.
        if value <= self._capacity:
            return
        
        if value <= 2:
            self.hash_bits = 1
        else:
            self.hash_bits = int(ceil(log2(value)))
    
    def __str__(self) -> str:
        # Materialize as a regular dict for display.
        return dict(self).__str__()

    def __repr__(self) -> str:
        return dict(self).__repr__()

    def _free_mem(self):
        """Release all shared memory owned by this mapping (buckets, hashmap,
        then the header itself).  Safe to call repeatedly: a second call is a
        no-op because ``_offset`` is cleared.
        """
        if self._offset is not None:
            for _, bucket in self.buckets.items():
                self._shared_memory.destroy_obj(bucket._offset)
            
            self.buckets.clear()
            if self.hashmap_offset is not None:
                self._shared_memory.destroy_obj(self.hashmap_offset)
                self.hashmap_offset = None
            
            self._shared_memory.free(self._offset)
            self._offset = None
3809
3810
class IMappingIterator:
    """Iterator over the keys of an ``IMapping``.

    Walks the hashmap slot by slot: an inline entry yields its key once; a
    bucket slot yields one key per stored ``(hash, key, value)`` triple.
    """

    def __init__(self, imapping: IMapping) -> None:
        self._imapping = imapping
        self._index = 0      # current hashmap slot
        self._sub_index = 0  # current entry inside the current bucket

    def __next__(self):
        mapping = self._imapping
        hashmap = mapping.hashmap
        while self._index < mapping.capacity:
            item_info_index: int = self._index * 4
            field_type = hashmap[item_info_index + 0]
            if 0 == field_type:
                # Empty slot: skip it.
                self._index += 1
            elif 1 == field_type:
                # Inline entry: key lives at record offset 2.
                key = hashmap[item_info_index + 2]
                self._index += 1
                return key
            elif 2 == field_type:
                bucket = mapping.buckets[item_info_index]
                flat_position = self._sub_index * 3
                if flat_position >= len(bucket):
                    # Bucket exhausted: reset and move to the next slot.
                    self._sub_index = 0
                    self._index += 1
                    continue

                key = bucket[flat_position + 1]
                self._sub_index += 1
                return key
            else:
                raise ValueError(f'Unknown MappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')

        raise StopIteration

    def __iter__(self):
        return self
3855
3856
class TMapping:
    """Codec for storing immutable mappings (``AbsMapping``) in shared memory."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: AbsMapping) -> Tuple[IMapping, Offset, Size]:
        """Serialize ``obj``; return (proxy, its offset, its allocated size)."""
        imapping: IMapping = IMapping(shared_memory, obj=obj)
        return imapping, imapping._offset, imapping._obj_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> IMapping:
        """Adopt an existing mapping at ``offset`` after checking its type tag."""
        if read_uint64(shared_memory.base_address, offset) != ObjectType.tmapping:
            raise WrongObjectTypeError

        return IMapping(shared_memory, offset)

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Release all shared memory held by the mapping at ``offset``."""
        if read_uint64(shared_memory.base_address, offset) != ObjectType.tmapping:
            raise WrongObjectTypeError

        IMapping(shared_memory, offset)._free_mem()
3874
3875
3876# ======================================================================================================================
3877# === MutableMapping =============================================================================================================
3878
3879
class MutableMappingOffsets(IntEnum):
    # Word (8-byte) offsets of the IMutableMapping header fields within its data area.
    size = 0             # number of key/value pairs stored
    capacity = 1         # number of hashmap slots
    hashmap_offset = 2   # shared-memory offset of the backing IList hashmap
    refresh_counter = 3  # bumped on rehash so other holders re-adopt the layout
3885
3886
class MutableMappingHashmapFieldTypes(IntEnum):
    # Discriminator stored in the first word of each IMutableMapping hashmap slot.
    tnone = 0    # slot is empty
    tobj = 1     # slot holds one key/value entry inline
    tbucket = 2  # slot points to a collision bucket (IList)
3891
3892
class MutableMappingHashmapItemOffsets(IntEnum):
    # Relative positions inside a 4-item IMutableMapping hashmap slot record.
    field_type = 0     # MutableMappingHashmapFieldTypes discriminator
    field_hash = 1     # hash of the key (meaningful when field_type == tobj)
    key_or_bucket = 2  # key object (tobj) or bucket offset (tbucket)
    value_or_none = 3  # value object (tobj only)
3898
3899
class MutableMappingBucketFieldTypes(IntEnum):
    # Discriminator for entries inside a mutable-mapping collision bucket.
    tnone = 0  # bucket entry is empty
    tobj = 1   # bucket entry holds a key/value pair
3903
3904
class MutableMappingBucketOffsets(IntEnum):
    # Relative positions inside one flat 4-item IMutableMapping bucket entry.
    field_type = 0  # MutableMappingBucketFieldTypes discriminator
    field_hash = 1  # hash of the key
    key_obj = 2     # key object
    value_obj = 3   # value object
3910
3911
3912class IMutableMapping(BaseIObject, AbsMutableMapping):
3913    __slots__ = ('_shared_memory', '_base_address', '_obj_size', '_offset', '_offset__data', '_offset__size_offset', '_offset__capacity_offset', '_offset__hashmap_offset', '_load_factor', '_load_factor_2', '_hash_bits', '_capacity', '_min_capacity', '_size', 'hashmap', 'hashmap_offset', 'buckets', '_refresh_counter', '_offset__refresh_counter_offset', 'ignore_rehash')
3914
3915    # @property
3916    # def __mro__(self) -> Tuple:
3917    #     return BaseIObject, AbsMutableMapping, dict
3918
    def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: AbsMutableMapping = None) -> None:
        """Create a new shared mutable mapping from ``obj`` (when ``offset``
        is None) or adopt an existing one located at ``offset``.

        Header payload is 32 bytes (size, capacity, hashmap offset, refresh
        counter) after the 16-byte allocator header; see
        ``MutableMappingOffsets``.
        """
        self._shared_memory = shared_memory
        self._base_address = shared_memory.base_address
        self._obj_size = None
        self._offset: Offset = None
        self._offset__data: Offset = None
        self._offset__size_offset: Offset = None
        self._offset__capacity_offset: Offset = None
        self._offset__hashmap_offset: Offset = None
        self._offset__refresh_counter_offset: Offset = None
        self._load_factor = 0.75    # upper bound: capacity = ceil(len / load_factor)
        self._load_factor_2 = 0.5625  # lower bound: min_capacity = ceil(capacity * load_factor_2)
        self._hash_bits: int = None
        self._capacity: int = None
        self._min_capacity: int = None
        self._size: int = None
        self.hashmap: IList = None
        self._refresh_counter: int = 0
        self.hashmap_offset: Offset = None
        self.buckets: Dict[int, IList] = dict()  # slot record index -> bucket IList

        # Suppress rehashing/refresh bookkeeping during construction.
        self.ignore_rehash: bool = True

        if offset is None:
            # --- construction path: serialize ``obj`` into shared memory ---
            if obj is None:
                # obj = frozenset(set())
                data_len = 16
            else:
                data_len = len(obj)

            # Size starts at 0 here (unlike IMapping): entries are counted as
            # they are inserted through __setitem__ / setitem_as_offset.
            self._size: int = 0
            self.hash_bits = 1  # resets _capacity so the capacity setter always grows
            self.capacity = int(ceil(data_len / self._load_factor))
            self._min_capacity = int(ceil(self._capacity * self._load_factor_2))

            offset, self._obj_size = shared_memory.malloc(ObjectType.tmutablemapping, 32)
            # NOTE(review): appears unused in the visible body — confirm before removing.
            created_items_offsets: List[Offset] = list()
            try:
                self._offset = offset
                offset__data = offset + 16
                self._offset__data = offset__data
                self._offset__size_offset = offset__data + 0
                self._offset__capacity_offset = offset__data + 8
                self._offset__hashmap_offset = offset__data + 16
                self._offset__refresh_counter_offset = offset__data + 24

                write_uint64(shared_memory.base_address, self._offset__size_offset, self._size)
                write_uint64(shared_memory.base_address, self._offset__capacity_offset, self.capacity)
                write_uint64(shared_memory.base_address, self._offset__refresh_counter_offset, self._refresh_counter)

                self.hashmap, hashmap_offset, _ = shared_memory.put_obj(list())
                self.hashmap = cast(IList, self.hashmap)
                self.hashmap_offset = hashmap_offset
                write_uint64(shared_memory.base_address, self._offset__hashmap_offset, hashmap_offset)
                hashmap_capacity = self.capacity * 4  # 4 words per slot record
                self.hashmap.set_capacity(hashmap_capacity)
                self.hashmap.extend_with(hashmap_capacity, 0)  # zero-fill: every slot starts as tnone
                hash_bits: int = self.hash_bits
                if obj is None:
                    pass
                elif isinstance(obj, IMutableMapping):
                    # Shared-to-shared: move entries by offset, no re-serialization.
                    self._move_from(obj)
                else:
                    for key, value in obj.items():
                        self.__setitem__(key, value)
                
                # Pick up any counter updates performed by the inserts above.
                self._refresh_counter = read_uint64(shared_memory.base_address, self._offset__refresh_counter_offset)

                self.ignore_rehash = False

                # print(f'Constructed {self.hashmap=}')
                # print(f'\tConstructed buckets:')
                # pdi(self.buckets)
                # for bucket_index, bucket in self.buckets.items():
                #     pdi(bucket)
                #     print(f'\t\t{bucket_index}:', bucket)
            except:
                # Roll back every shared-memory allocation made so far.
                self._free_mem()
                raise
        else:
            # --- adoption path: attach to an existing serialized mapping ---
            self._refresh_hashmap(offset)
            self.ignore_rehash = False

            # self._offset = offset
            # offset__data = offset + 16
            # self._offset__data = offset__data
            # self._offset__size_offset: Offset = offset__data + 0
            # self._offset__capacity_offset: Offset = offset__data + 8
            # self._offset__hashmap_offset = offset__data + 16

            # self._size = read_uint64(shared_memory.base_address, self._offset__size_offset)
            # self.hash_bits = 1
            # self.capacity = read_uint64(shared_memory.base_address, self._offset__capacity_offset)
            # hashmap_offset = read_uint64(shared_memory.base_address, self._offset__hashmap_offset)
            # self._min_capacity = int(ceil(self._capacity * self._load_factor_2))
            
            # self.hashmap_offset = hashmap_offset
            # self.hashmap = IList(shared_memory, hashmap_offset)
            # # print(f'Adopted by {type(self)}: {self.hashmap=}')
            # item_info_index: int = 0
            # # for item_info_index in range(self.capacity):
            # #     field_type_index = item_info_index * 4 + 0
            # #     item_hash_index = item_info_index * 4 + 1
            # #     item_bucket_index = item_info_index * 4 + 2
            # #     item_value_index = item_info_index * 4 + 3
            # #     field_type = self.hashmap[field_type_index]
            # #     if 0 == field_type:
            # #         continue
            # #     elif 1 == field_type:
            # #         continue
            # #     elif 2 == field_type:
            # #         bucket_offset = self.hashmap[item_bucket_index]
            # #         self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
            # #     else:
            # #         raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
            
            # for item_info_index in range(0, self.capacity * 4, 4):
            #     field_type_index = item_info_index + 0
            #     item_hash_index = item_info_index + 1
            #     item_bucket_index = item_info_index + 2
            #     item_value_index = item_info_index + 3
            #     field_type = self.hashmap[field_type_index]
            #     if 0 == field_type:
            #         continue
            #     elif 1 == field_type:
            #         continue
            #     elif 2 == field_type:
            #         bucket_offset = self.hashmap[item_bucket_index]
            #         self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
            #     else:
            #         raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')

            # self.ignore_rehash = False
            
            # # print(f'\tAdopted by {type(self)} buckets:')
            # # pdi(self.buckets)
            # # for bucket_index, bucket in self.buckets.items():
            # #     pdi(bucket)
            # #     print(f'\t\t{bucket_index}:', bucket)
4058
    def _refresh_hashmap(self, offset: Offset):
        """Discard all cached state and re-adopt the mapping stored at
        ``offset`` from shared memory.

        Used both to adopt an existing mapping on construction and to
        re-synchronize after another holder bumped the refresh counter.
        """
        # print(f'~ refresh_hashmap {offset}: {intro_func_repr_limited()}')

        # ignore_rehash = self.ignore_rehash
        # self.ignore_rehash = True

        # Drop every locally cached view of the layout.
        self._hash_bits = None
        self._capacity = None
        self._min_capacity = None
        self._size = None
        self.hashmap = None
        self._refresh_counter = 0
        self.hashmap_offset = None
        self.buckets = dict()

        shared_memory = self._shared_memory
        self._offset = offset
        offset__data = offset + 16
        self._offset__data = offset__data
        self._offset__size_offset: Offset = offset__data + 0
        self._offset__capacity_offset: Offset = offset__data + 8
        self._offset__hashmap_offset = offset__data + 16
        self._offset__refresh_counter_offset = offset__data + 24

        # Re-read every header field from shared memory.
        self._refresh_counter = read_uint64(shared_memory.base_address, self._offset__refresh_counter_offset)
        self._size = read_uint64(shared_memory.base_address, self._offset__size_offset)
        self.hash_bits = 1  # reset _capacity so the setter accepts the stored value
        self.capacity = read_uint64(shared_memory.base_address, self._offset__capacity_offset)
        hashmap_offset = read_uint64(shared_memory.base_address, self._offset__hashmap_offset)
        self._min_capacity = int(ceil(self._capacity * self._load_factor_2))
        
        self.hashmap_offset = hashmap_offset
        self.hashmap = IList(shared_memory, hashmap_offset)
        # print(f'Adopted by {type(self)}: {self.hashmap=}')
        # item_info_index: int = 0
        # for item_info_index in range(self.capacity):
        #     field_type_index = item_info_index * 4 + 0
        #     item_hash_index = item_info_index * 4 + 1
        #     item_bucket_index = item_info_index * 4 + 2
        #     item_value_index = item_info_index * 4 + 3
        #     field_type = self.hashmap[field_type_index]
        #     if 0 == field_type:
        #         continue
        #     elif 1 == field_type:
        #         continue
        #     elif 2 == field_type:
        #         bucket_offset = self.hashmap[item_bucket_index]
        #         self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
        #     else:
        #         raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
        
        # Re-adopt every collision bucket referenced by the hashmap.
        for item_info_index in range(0, self.capacity * 4, 4):
            field_type_index = item_info_index + 0
            item_hash_index = item_info_index + 1
            item_bucket_index = item_info_index + 2
            item_value_index = item_info_index + 3
            field_type = self.hashmap[field_type_index]
            if 0 == field_type:
                continue
            elif 1 == field_type:
                continue
            elif 2 == field_type:
                bucket_offset = self.hashmap[item_bucket_index]
                self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
            else:
                raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')

        # self.ignore_rehash = ignore_rehash
4127    
4128    @property
4129    def refresh_counter(self):
4130        return read_uint64(self._base_address, self._offset__refresh_counter_offset)
4131    
4132    def _increase_refresh_counter(self):
4133        if self.ignore_rehash:
4134            # print(f'~ ignore increase_refresh_counter {self._offset}: {intro_func_repr_limited()}')
4135            pass
4136        else:
4137            # print(f'~ increase_refresh_counter {self._offset}: {intro_func_repr_limited()}')
4138            # refresh_counter = read_uint64(self._base_address, self._offset__refresh_counter_offset)
4139            # if self._refresh_counter != refresh_counter:
4140            #     print('~!!! increase_refresh_counter')
4141            
4142            self._refresh_counter += 1
4143            write_uint64(self._base_address, self._offset__refresh_counter_offset, self._refresh_counter)
4144    
4145    def _check_hashmap(self):
4146        if self.ignore_rehash:
4147            # print(f'~ ignore check_hashmap {self._offset}: {intro_func_repr_limited()}')
4148            return False
4149        else:
4150            base_address = self._base_address
4151            refresh_counter = read_uint64(base_address, self._offset__refresh_counter_offset)
4152            # hashmap_offset = read_uint64(base_address, self._offset__hashmap_offset)
4153            # if (self._refresh_counter != refresh_counter) or (self.hashmap_offset != hashmap_offset) or (self._hashmap._offset != hashmap_offset):
4154            if self._refresh_counter != refresh_counter:
4155                # print(f'~ check_hashmap {self._offset}: {intro_func_repr_limited()}')
4156                self._refresh_hashmap(self._offset)
4157                return True
4158            
4159            return False
4160
4161    # @property
4162    # def hashmap(self) -> IList:
4163    #     if self.ignore_rehash:
4164    #         return self._hashmap
4165    #     else:
4166    #         self._check_hashmap()
4167    #         return self._hashmap
4168    
4169    # @hashmap.setter
4170    # def hashmap(self, value: IList):
4171    #     self._hashmap = value
4172
4173    def _increase_size(self):
4174        self._size += 1
4175        write_uint64(self._base_address, self._offset__size_offset, self._size)
4176        if (self._size > self._capacity) or (self._size < self._min_capacity):
4177            self._rehash()
4178    
4179    def _decrease_size(self):
4180        self._size -= 1
4181        if self._size < 0:
4182            raise RuntimeError('Size of the set is negative')
4183
4184        write_uint64(self._base_address, self._offset__size_offset, self._size)
4185        if (self._size > self._capacity) or (self._size < self._min_capacity):
4186            self._rehash()
4187    
4188    def _move_from(self, other: 'IMutableMapping'):
4189        for key_hash, key_type, key_offset, value_type, value_offset in other.iter_offset_pop():
4190            self.setitem_as_offset(key_hash, key_type, key_offset, value_type, value_offset)
4191    
    def _rehash(self):
        """Rebuild the hashmap at the size appropriate for the current number
        of entries.

        Strategy: serialize ``self`` into a brand-new IMutableMapping (which
        is therefore built with a freshly sized hashmap), then swap the two
        objects' storage fields — both cached attributes and the shared
        header words — and destroy the new object, which by then owns the
        OLD hashmap/buckets.  Other holders are notified via the refresh
        counter.
        """
        if self.ignore_rehash:
            # print(f'~ ignore rehash {self._offset}: {intro_func_repr_limited()}')
            return 
        
        # print(f'~ rehash {self._offset}: {intro_func_repr_limited()}')
        self._increase_refresh_counter()

        # Prevent recursive rehashing while entries are re-inserted.
        ignore_rehash = self.ignore_rehash
        self.ignore_rehash = True

        # Builds a right-sized copy; IMutableMapping.__init__ takes the
        # IMutableMapping branch and moves entries via _move_from.
        new_other, new_other_offset, new_other_size = self._shared_memory.put_obj(self)
        new_other = cast(IMutableMapping, new_other)

        # Snapshot the copy's storage (cached attributes + raw header words).
        other_capacity = new_other._capacity
        other_hash_bits = new_other._hash_bits
        other_min_capacity = new_other._min_capacity
        other_size = new_other._size
        # refresh_counter = new_other._refresh_counter
        other_hashmap = new_other.hashmap
        other_hashmap_offset = new_other.hashmap_offset
        other_buckets = new_other.buckets
        other_hashmap_offset_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__hashmap_offset)
        other_size_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__size_offset)
        other_capacity_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__capacity_offset)
        # refresh_counter_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__refresh_counter_offset)
        
        # Hand the OLD (now empty) storage over to the copy...
        new_other._capacity = self._capacity
        new_other._hash_bits = self._hash_bits
        new_other._min_capacity = self._min_capacity
        new_other._size = self._size
        # new_other._refresh_counter = self._refresh_counter
        new_other.hashmap = self.hashmap
        new_other.hashmap_offset = self.hashmap_offset
        new_other.buckets = self.buckets
        write_uint64(new_other._shared_memory.base_address, new_other._offset__hashmap_offset, read_uint64(self._base_address, self._offset__hashmap_offset))
        write_uint64(new_other._shared_memory.base_address, new_other._offset__size_offset, read_uint64(self._base_address, self._offset__size_offset))
        write_uint64(new_other._shared_memory.base_address, new_other._offset__capacity_offset, read_uint64(self._base_address, self._offset__capacity_offset))
        # write_uint64(new_other._shared_memory.base_address, new_other._offset__refresh_counter_offset, read_uint64(self._base_address, self._offset__refresh_counter_offset))

        # ...and take the copy's freshly built storage for ourselves.
        self._capacity = other_capacity
        self._hash_bits = other_hash_bits
        self._min_capacity = other_min_capacity
        self._size = other_size
        # self._refresh_counter = refresh_counter
        self.hashmap = other_hashmap
        self.hashmap_offset = other_hashmap_offset
        self.buckets = other_buckets
        write_uint64(self._base_address, self._offset__hashmap_offset, other_hashmap_offset_bin)
        write_uint64(self._base_address, self._offset__size_offset, other_size_bin)
        write_uint64(self._base_address, self._offset__capacity_offset, other_capacity_bin)
        # write_uint64(self._base_address, self._offset__refresh_counter_offset, refresh_counter_bin)

        # Destroys the copy together with the old hashmap/buckets it now owns.
        self._shared_memory.destroy_obj(new_other_offset)

        self.ignore_rehash = ignore_rehash
4248
4249    def __len__(self):
4250        self._check_hashmap()
4251        return self._size
4252    
4253    def __iter__(self):
4254        self._check_hashmap()
4255        return IMutableMappingIterator(self)
4256    
4257    def iter_offset(self):
4258        self._check_hashmap()
4259        return IMutableMappingIteratorAsOffset(self)
4260    
4261    def iter_offset_pop(self):
4262        self._check_hashmap()
4263        return IMutableMappingIteratorAsOffset(self, True)
4264    
4265    # def __contains__(self, key: Hashable) -> bool:
4266    #     item_hash = hash(key)
4267    #     item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits)
4268    #     field_type_index = item_info_index * 4 + 0
4269    #     item_hash_index = item_info_index * 4 + 1
4270    #     item_bucket_index = item_info_index * 4 + 2
4271    #     item_value_index = item_info_index * 4 + 3
4272    #     field_type = self.hashmap[field_type_index]
4273    #     if 0 == field_type:
4274    #         return False
4275    #     elif 1 == field_type:
4276    #         return (item_hash == self.hashmap[item_hash_index]) and (key == self.hashmap[item_bucket_index])
4277    #     elif 2 == field_type:
4278    #         bucket = self.buckets[item_info_index]
4279    #         for sub_item_info_index in range(0, len(bucket), 4):
4280    #             bucket_field_type = bucket[sub_item_info_index + 0]
4281    #             if 0 == bucket_field_type:
4282    #                 continue
4283
4284    #             sub_item_hash_index = sub_item_info_index + 1
4285    #             sub_item_key_obj_index = sub_item_info_index + 2
4286    #             sub_item_value_obj_index = sub_item_info_index + 3
4287    #             if (item_hash == bucket[sub_item_hash_index]) and (key == bucket[sub_item_key_obj_index]):
4288    #                 return True
4289            
4290    #         return False
4291    #     else:
4292    #         raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
4293
    def __getitem__(self, key: Hashable):
        """Return the value stored under *key*.

        Each hashmap slot spans 4 consecutive fields:
        (field_type, key_hash, key-or-bucket-offset, value).
        field_type: 0 = empty, 1 = single inline entry,
        2 = overflow bucket (an IList of 4-field records of the same shape).

        Raises:
            KeyError: key not present, or the cached bucket is stale.
            ValueError: corrupted/unknown field type.
        """
        self._check_hashmap()
        item_hash = hash(key)
        # Slot index = low `hash_bits` bits of the hash; *4 fields per slot.
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * 4
        field_type_index = item_info_index + 0
        item_hash_index = item_info_index + 1
        item_bucket_index = item_info_index + 2
        item_value_index = item_info_index + 3
        field_type = self.hashmap[field_type_index]
        if 0 == field_type:
            raise KeyError
        elif 1 == field_type:
            # Compare the cheap hash first, then the key itself.
            if (item_hash == self.hashmap[item_hash_index]) and (key == self.hashmap[item_bucket_index]):
                return self.hashmap[item_value_index]
            else:
                raise KeyError
        elif 2 == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    raise KeyError
            except KeyError:
                raise
                # NOTE(review): unreachable — the bare `raise` above fires
                # before this bucket re-attach line can run; presumably
                # disabled deliberately. Confirm before re-enabling.
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            # Linear scan of the bucket's 4-field records.
            for sub_item_info_index in range(0, len(bucket), 4):
                bucket_field_type = bucket[sub_item_info_index + 0]
                if 0 == bucket_field_type:
                    continue

                sub_item_hash_index = sub_item_info_index + 1
                sub_item_key_obj_index = sub_item_info_index + 2
                sub_item_value_obj_index = sub_item_info_index + 3
                if (item_hash == bucket[sub_item_hash_index]) and (key == bucket[sub_item_key_obj_index]):
                    return bucket[sub_item_value_obj_index]
            
            raise KeyError
        else:
            raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
4334
    def __setitem__(self, key, value):
        """Insert or update *key* → *value*.

        Slot layout: 4 fields per slot — (field_type, key_hash,
        key-or-bucket-offset, value); field_type 0 = empty, 1 = single
        inline entry, 2 = overflow bucket (IList).  On the first collision
        a slot is promoted from type 1 to type 2 by moving the resident
        entry into a freshly allocated bucket.

        Raises:
            ValueError: corrupted/unknown field type.
        """
        self._check_hashmap()
        key_hash = hash(key)
        # Slot index = low `hash_bits` bits of the hash; *4 fields per slot.
        item_info_index: int = mask_least_significant_bits(key_hash, self.hash_bits) * 4
        field_type_index = item_info_index + 0
        item_hash_index = item_info_index + 1
        item_bucket_index = item_info_index + 2
        item_value_index = item_info_index + 3
        field_type = self.hashmap[field_type_index]
        if 0 == field_type:
            # Empty slot: store the entry inline.
            self.hashmap[field_type_index] = 1
            self.hashmap[item_hash_index] = key_hash
            self.hashmap[item_bucket_index] = key
            self.hashmap[item_value_index] = value
            self._increase_size()
            return
        elif 1 == field_type:
            if (key_hash == self.hashmap[item_hash_index]) and (key == self.hashmap[item_bucket_index]):
                # Same key: overwrite the value in place.
                self.hashmap[item_value_index] = value
                return
            
            # Collision: promote this slot to an overflow bucket (type 2).
            self._increase_refresh_counter()
            bucket, bucket_offset, _ = self._shared_memory.put_obj(list())
            bucket = cast(IList, bucket)
            bucket.set_capacity(4)
            bucket.extend_with(4, 0)
            self.buckets[item_info_index] = bucket
            bucket[0] = 1
            # Move the resident entry into bucket fields 1..3 ...
            self.hashmap.move_item_to_list(item_hash_index, bucket, 1)
            self.hashmap.move_item_to_list(item_bucket_index, bucket, 2)
            self.hashmap.move_item_to_list(item_value_index, bucket, 3)
            self.hashmap[field_type_index] = 2
            self.hashmap[item_bucket_index] = bucket_offset
            # ... then append the new entry as a second 4-field record.
            bucket.append(1)
            bucket.append(key_hash)
            bucket.append(key)
            bucket.append(value)
            self._increase_size()
            return
        elif 2 == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    raise KeyError
            except KeyError:
                raise
                # NOTE(review): unreachable — disabled bucket re-attach;
                # confirm before re-enabling.
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            # Pass 1: update in place if the key already exists.
            bucket_len: int = len(bucket)
            for bucket_item_index in range(0, bucket_len, 4):
                bucket_field_type = bucket[bucket_item_index + 0]
                if 1 == bucket_field_type:
                    if (key_hash == bucket[bucket_item_index + 1]) and (key == bucket[bucket_item_index + 2]):
                        bucket[bucket_item_index + 3] = value
                        return
            
            # Pass 2: reuse a freed 4-field record, else append a new one.
            for bucket_item_index in range(0, bucket_len, 4):
                bucket_field_type = bucket[bucket_item_index + 0]
                if 0 == bucket_field_type:
                    bucket[bucket_item_index + 0] = 1
                    bucket[bucket_item_index + 1] = key_hash
                    bucket[bucket_item_index + 2] = key
                    bucket[bucket_item_index + 3] = value
                    self._increase_size()
                    return
            else:
                bucket.append(1)
                bucket.append(key_hash)
                bucket.append(key)
                bucket.append(value)
                self._increase_size()
                return
        else:
            raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
4410
    def setitem_as_offset(self, key_hash, key_type, key_offset, value_type, value_offset):
        """Insert/update an entry given raw (type, offset) pairs.

        Same algorithm as __setitem__, but key and value arrive as
        already-encoded (type, offset) pairs, so no Python-object encode
        step happens here.  Key equality is decided by comparing the
        (type, offset) tuples, not the decoded objects.

        Raises:
            ValueError: corrupted/unknown field type.
        """
        self._check_hashmap()
        key = (key_type, key_offset)
        value = (value_type, value_offset)
        item_info_index: int = mask_least_significant_bits(key_hash, self.hash_bits) * 4
        field_type_index = item_info_index + 0
        item_hash_index = item_info_index + 1
        item_bucket_index = item_info_index + 2
        item_value_index = item_info_index + 3
        field_type = self.hashmap[field_type_index]
        if 0 == field_type:
            # Empty slot: store inline; key/value stored as (type, offset).
            self.hashmap[field_type_index] = 1
            self.hashmap[item_hash_index] = key_hash
            self.hashmap.setitem_as_offset(item_bucket_index, key)
            self.hashmap.setitem_as_offset(item_value_index, value)
            self._increase_size()
            return
        elif 1 == field_type:
            if (key_hash == self.hashmap[item_hash_index]) and (key == self.hashmap.getitem_as_offset(item_bucket_index)):
                self.hashmap.setitem_as_offset(item_value_index, value)
                return
            
            # Collision: promote this slot to an overflow bucket (type 2).
            self._increase_refresh_counter()
            bucket, bucket_offset, _ = self._shared_memory.put_obj(list())
            bucket = cast(IList, bucket)
            bucket.set_capacity(4)
            bucket.extend_with(4, 0)
            self.buckets[item_info_index] = bucket
            bucket[0] = 1
            self.hashmap.move_item_to_list(item_hash_index, bucket, 1)
            self.hashmap.move_item_to_list(item_bucket_index, bucket, 2)
            self.hashmap.move_item_to_list(item_value_index, bucket, 3)
            self.hashmap[field_type_index] = 2
            self.hashmap[item_bucket_index] = bucket_offset
            bucket.append(1)
            bucket.append(key_hash)
            bucket.append_as_offset(key)
            bucket.append_as_offset(value)
            self._increase_size()
            return
        elif 2 == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    raise KeyError
            except KeyError:
                raise
                # NOTE(review): unreachable — disabled bucket re-attach;
                # confirm before re-enabling.
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            # Pass 1: update in place if the key already exists.
            bucket_len: int = len(bucket)
            for bucket_item_index in range(0, bucket_len, 4):
                bucket_field_type = bucket[bucket_item_index + 0]
                if 1 == bucket_field_type:
                    if (key_hash == bucket[bucket_item_index + 1]) and (key == bucket.getitem_as_offset(bucket_item_index + 2)):
                        bucket.setitem_as_offset(bucket_item_index + 3, value)
                        return
            
            # Pass 2: reuse a freed 4-field record, else append a new one.
            for bucket_item_index in range(0, bucket_len, 4):
                bucket_field_type = bucket[bucket_item_index + 0]
                if 0 == bucket_field_type:
                    bucket[bucket_item_index + 0] = 1
                    bucket[bucket_item_index + 1] = key_hash
                    bucket.setitem_as_offset(bucket_item_index + 2, key)
                    bucket.setitem_as_offset(bucket_item_index + 3, value)
                    self._increase_size()
                    return
            else:
                bucket.append(1)
                bucket.append(key_hash)
                bucket.append_as_offset(key)
                bucket.append_as_offset(value)
                self._increase_size()
                return
        else:
            raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
4487
    def __delitem__(self, key):
        """Remove *key* and its value.

        Inline entries (field_type 1) are cleared in place; entries inside
        an overflow bucket (field_type 2) have their 4-field record zeroed
        while the bucket itself is kept for later reuse.

        Raises:
            KeyError: key not present, or the cached bucket is stale.
            ValueError: corrupted/unknown field type.
        """
        self._check_hashmap()
        item_hash = hash(key)
        # Slot index = low `hash_bits` bits of the hash; *4 fields per slot.
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * 4
        field_type_index = item_info_index + 0
        item_hash_index = item_info_index + 1
        item_bucket_index = item_info_index + 2
        item_value_index = item_info_index + 3
        field_type = self.hashmap[field_type_index]
        if 0 == field_type:
            raise KeyError
        elif 1 == field_type:
            if (item_hash == self.hashmap[item_hash_index]) and (key == self.hashmap[item_bucket_index]):
                # Clear the inline record.
                self.hashmap[field_type_index] = 0
                self.hashmap[item_hash_index] = None
                self.hashmap[item_bucket_index] = None
                self.hashmap[item_value_index] = None
                self._decrease_size()
                return
            else:
                raise KeyError
        elif 2 == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    raise KeyError
            except KeyError:
                raise
                # NOTE(review): unreachable — disabled bucket re-attach;
                # confirm before re-enabling.
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            # Scan bucket records; zero the matching one.
            for sub_item_info_index in range(0, len(bucket), 4):
                bucket_field_type = bucket[sub_item_info_index + 0]
                if 0 == bucket_field_type:
                    continue

                sub_item_hash_index = sub_item_info_index + 1
                sub_item_key_obj_index = sub_item_info_index + 2
                sub_item_value_obj_index = sub_item_info_index + 3
                if (item_hash == bucket[sub_item_hash_index]) and (key == bucket[sub_item_key_obj_index]):
                    bucket[sub_item_info_index + 0] = 0
                    bucket[sub_item_hash_index] = None
                    bucket[sub_item_key_obj_index] = None
                    bucket[sub_item_value_obj_index] = None
                    self._decrease_size()
                    return

            raise KeyError
        else:
            raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
4538
    @property
    def hash_bits(self) -> int:
        """Number of low hash bits used to index the hashmap."""
        return self._hash_bits

    @hash_bits.setter
    def hash_bits(self, value: int) -> None:
        # Capacity is derived: one slot per possible masked-hash value.
        self._hash_bits = value
        self._capacity = 2 ** value
4547    
    @property
    def capacity(self) -> int:
        """Current slot capacity of the hashmap (2 ** hash_bits)."""
        return self._capacity

    @capacity.setter
    def capacity(self, value: int) -> None:
        # Grow-only: requests at or below the current capacity are ignored.
        if value <= self._capacity:
            return
        
        if value <= 2:
            self.hash_bits = 1
        else:
            # Round up to the next power of two via the hash_bits setter.
            self.hash_bits = int(ceil(log2(value)))
4561    
4562    def __str__(self) -> str:
4563        self._check_hashmap()
4564        return dict(self).__str__()
4565
4566    def __repr__(self) -> str:
4567        self._check_hashmap()
4568        return dict(self).__repr__()
4569
    def _free_mem(self):
        """Release all shared memory owned by this mapping.

        Order: every cached overflow bucket, then the hashmap array, then
        the mapping's own record.  Idempotent: a second call is a no-op
        because _offset is cleared at the end.
        """
        if self._offset is not None:
            if self.hashmap_offset is not None:
                # Revalidate so `self.buckets` reflects the live hashmap.
                self._check_hashmap()
            
            for _, bucket in self.buckets.items():
                self._shared_memory.destroy_obj(bucket._offset)
            self.buckets.clear()
            if self.hashmap_offset is not None:
                self._shared_memory.destroy_obj(self.hashmap_offset)
                self.hashmap_offset = None
            
            self._shared_memory.free(self._offset)
            self._offset = None
4584
4585
class IMutableMappingIterator:
    """Iterator over the keys of an IMutableMapping.

    Walks the hashmap slot by slot (`_index`); for slots that hold an
    overflow bucket (field type 2) it additionally walks the bucket's
    records (`_sub_index`).  Every hashmap/bucket record spans 4 fields:
    (field_type, key_hash, key-or-bucket-offset, value).

    Raises RuntimeError from __next__ if the underlying hashmap is
    rebuilt (rehashed) while iterating.
    """

    def __init__(self, imapping: 'IMutableMapping') -> None:
        self._imapping = imapping
        self._index = 0      # current hashmap slot
        self._sub_index = 0  # next record to inspect inside the current bucket

    def __next__(self):
        # A truthy result means the hashmap was refreshed/rebuilt since
        # iteration started; positions are no longer meaningful.
        if self._imapping._check_hashmap():
            raise RuntimeError("Dictionary's hashmap changed during iteration")

        while self._index < self._imapping.capacity:
            item_info_index: int = self._index * 4
            field_type_index = item_info_index + 0
            item_bucket_index = item_info_index + 2
            field_type = self._imapping.hashmap[field_type_index]
            if 0 == field_type:
                # Empty slot.
                self._index += 1
                continue
            elif 1 == field_type:
                # Inline entry: the bucket field holds the key itself.
                result = self._imapping.hashmap[item_bucket_index]
                self._index += 1
                return result
            elif 2 == field_type:
                bucket_offset = self._imapping.hashmap[item_bucket_index]
                try:
                    bucket = self._imapping.buckets[item_info_index]
                    if bucket._offset != bucket_offset:
                        raise KeyError
                except KeyError:
                    # Stale/missing cached bucket: propagate, matching the
                    # behavior of the mapping's accessor methods.
                    raise

                bucket_len = len(bucket)
                sub_item_info_index = self._sub_index
                while (sub_item_info_index * 4) < bucket_len:
                    sub_item_field_type_index = sub_item_info_index * 4 + 0
                    if bucket[sub_item_field_type_index] == 0:
                        # Freed record: skip.
                        sub_item_info_index += 1
                        continue

                    result = bucket[sub_item_info_index * 4 + 2]
                    # BUGFIX: resume AFTER the record just returned.  The
                    # previous `self._sub_index += 1` ignored any freed
                    # records skipped above, so the next call re-scanned
                    # from an earlier position and yielded duplicates.
                    self._sub_index = sub_item_info_index + 1
                    return result
                else:
                    # Bucket exhausted: move to the next hashmap slot.
                    self._sub_index = 0
                    self._index += 1
                    continue
            else:
                raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
        else:
            raise StopIteration

    def __iter__(self):
        return self
4645
4646
class IMutableMappingIteratorAsOffset:
    """Iterator yielding raw (key_hash, key_type, key_offset, value_type,
    value_offset) tuples from an IMutableMapping.

    With pop=True each yielded entry is also removed: its record's field
    type is zeroed and key/value offset cells are reset to (0, 0) without
    destroying the referenced objects (setitem_as_offset(..., False)).

    Raises RuntimeError from __next__ if the underlying hashmap is
    rebuilt (rehashed) while iterating.
    """

    def __init__(self, imapping: 'IMutableMapping', pop: bool = False) -> None:
        self._imapping = imapping
        self._pop: bool = pop
        self._index = 0      # current hashmap slot
        self._sub_index = 0  # next record to inspect inside the current bucket

    def __next__(self):
        if self._imapping._check_hashmap():
            raise RuntimeError("Dictionary's hashmap changed during iteration")

        while self._index < self._imapping.capacity:
            item_info_index: int = self._index * 4
            field_type_index = item_info_index + 0
            item_hash_index = item_info_index + 1
            item_bucket_index = item_info_index + 2
            item_value_index = item_info_index + 3
            field_type = self._imapping.hashmap[field_type_index]
            if 0 == field_type:
                # Empty slot.
                self._index += 1
                continue
            elif 1 == field_type:
                key_hash = self._imapping.hashmap[item_hash_index]
                key_type, key_offset = self._imapping.hashmap.getitem_as_offset(item_bucket_index)
                value_type, value_offset = self._imapping.hashmap.getitem_as_offset(item_value_index)
                if self._pop:
                    self._imapping.hashmap[field_type_index] = 0
                    self._imapping.hashmap[item_hash_index] = None
                    self._imapping.hashmap.setitem_as_offset(item_bucket_index, (0, 0), False)
                    self._imapping.hashmap.setitem_as_offset(item_value_index, (0, 0), False)

                self._index += 1
                return key_hash, key_type, key_offset, value_type, value_offset
            elif 2 == field_type:
                bucket_offset = self._imapping.hashmap[item_bucket_index]
                try:
                    bucket = self._imapping.buckets[item_info_index]
                    if bucket._offset != bucket_offset:
                        raise KeyError
                except KeyError:
                    # Stale/missing cached bucket: propagate, matching the
                    # behavior of the mapping's accessor methods.
                    raise

                bucket_len = len(bucket)
                sub_item_info_index = self._sub_index
                while (sub_item_info_index * 4) < bucket_len:
                    sub_item_field_type_index = sub_item_info_index * 4 + 0
                    if bucket[sub_item_field_type_index] == 0:
                        # Freed record: skip.
                        sub_item_info_index += 1
                        continue

                    sub_item_hash_index = sub_item_info_index * 4 + 1
                    sub_item_key_obj_index = sub_item_info_index * 4 + 2
                    sub_item_value_obj_index = sub_item_info_index * 4 + 3

                    key_hash = bucket[sub_item_hash_index]
                    key_type, key_offset = bucket.getitem_as_offset(sub_item_key_obj_index)
                    value_type, value_offset = bucket.getitem_as_offset(sub_item_value_obj_index)
                    if self._pop:
                        bucket[sub_item_field_type_index] = 0
                        bucket[sub_item_hash_index] = None
                        bucket.setitem_as_offset(sub_item_key_obj_index, (0, 0), False)
                        bucket.setitem_as_offset(sub_item_value_obj_index, (0, 0), False)

                    # BUGFIX: resume AFTER the record just returned.  The
                    # previous `self._sub_index += 1` ignored freed records
                    # skipped above and could re-yield the same entry in
                    # non-pop mode.
                    self._sub_index = sub_item_info_index + 1
                    return key_hash, key_type, key_offset, value_type, value_offset
                else:
                    # Bucket exhausted: move to the next hashmap slot.
                    self._sub_index = 0
                    self._index += 1
                    continue
            else:
                raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
        else:
            raise StopIteration

    def __iter__(self):
        return self
4724
4725
class TMutableMapping:
    """Shared-memory type handler for MutableMapping objects.

    Bridges plain Python mappings and their shared-memory IMutableMapping
    representation (object type tag: ObjectType.tmutablemapping).
    """

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: AbsMutableMapping) -> Tuple[IMutableMapping, Offset, Size]:
        """Copy *obj* into shared memory; return (wrapper, offset, size)."""
        mapped = IMutableMapping(shared_memory, obj=obj)
        return mapped, mapped._offset, mapped._obj_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> IMutableMapping:
        """Attach to an existing record at *offset*; raises WrongObjectTypeError on a type-tag mismatch."""
        if read_uint64(shared_memory.base_address, offset) != ObjectType.tmutablemapping:
            raise WrongObjectTypeError
        
        return IMutableMapping(shared_memory, offset)

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Free the record at *offset*; raises WrongObjectTypeError on a type-tag mismatch."""
        if read_uint64(shared_memory.base_address, offset) != ObjectType.tmutablemapping:
            raise WrongObjectTypeError
        
        IMutableMapping(shared_memory, offset)._free_mem()
4743
4744
4745# ======================================================================================================================
4746# === General Object =============================================================================================================
4747
4748
class ForceGeneralObjectCopy:
    """Marker wrapper: asks TGeneralObject.map_to_shared_memory to wrap a
    pickled COPY of the object instead of modifying the original in place.
    """

    def __init__(self, obj: Any) -> None:
        # The object to be copied into shared memory.
        self.obj = obj
4752
4753
# Convenience aliases for ForceGeneralObjectCopy (short and lowercase spellings).
FGeneralObjectCopy = ForceGeneralObjectCopy
forcegeneralobjectcopy = ForceGeneralObjectCopy
fgeneralobjectcopy = ForceGeneralObjectCopy
4757
4758
class ForceGeneralObjectInplace:
    """Marker wrapper: asks TGeneralObject.map_to_shared_memory to wrap the
    given object ITSELF (in place) rather than a pickled copy.
    """

    def __init__(self, obj: Any) -> None:
        # The object to be wrapped in place.
        self.obj = obj
4762
4763
# Convenience aliases for ForceGeneralObjectInplace (short and lowercase spellings).
FGeneralObjectInplace = ForceGeneralObjectInplace
forcegeneralobjectinplace = ForceGeneralObjectInplace
fgeneralobjectinplace = ForceGeneralObjectInplace
4767
4768
class GeneralObjectOffsets(IntEnum):
    """Logical field order of a TGeneralObject record.

    The order mirrors the three uint64 slots written at offset+16+0/+8/+16
    by TGeneralObject.map_to_shared_memory (pickled blob, shared attribute
    dict, pickled descriptor-name set).  NOTE(review): the enum itself is
    not referenced in this part of the file — confirm intended usage.
    """
    pickled_obj = 0
    obj_dict = 1
    setable_data_descriptor_field_names = 2
4773
4774
def tgeneralobject_custom_getattribute(self, name):
    """Replacement __getattribute__ for TGeneralObject-wrapped instances.

    Lookup order: bookkeeping fields and dunder names go straight to
    `object.__getattribute__`; every other name is first looked up in the
    shared-memory attribute mapping and only then falls back to normal
    instance lookup.
    """
    bookkeeping = {'_tgeneralobject_imutablemapping_attributes', '_tgeneralobject_setable_data_descriptor_field_names'}
    if name.startswith('__') or name in bookkeeping:
        return object.__getattribute__(self, name)

    shared_attributes = self._tgeneralobject_imutablemapping_attributes
    try:
        return shared_attributes[name]
    except KeyError:
        # Not in shared memory: fall back to the regular lookup.
        return object.__getattribute__(self, name)
4785
4786
def tgeneralobject_custom_setattr(self, name, value):
    """Replacement __setattr__ for TGeneralObject-wrapped instances.

    Bookkeeping fields, dunder names, and callable/code/frame values are
    stored directly on the instance; ordinary data values go into the
    shared-memory attribute mapping.
    """
    bookkeeping = {'_tgeneralobject_imutablemapping_attributes', '_tgeneralobject_setable_data_descriptor_field_names'}
    if name.startswith('__') or name in bookkeeping:
        object.__setattr__(self, name, value)
        return

    if (isfunction(value) or ismethod(value) or ismethoddescriptor(value)
            or isinstance(value, (FrameType, CodeType))):
        # Callables and code objects never go through shared memory.
        object.__setattr__(self, name, value)
        return

    self._tgeneralobject_imutablemapping_attributes[name] = value
4802
4803
def tgeneralobject_custom_delattr(self, name):
    """Replacement __delattr__ for TGeneralObject-wrapped instances.

    Order of operations:
      1. Bookkeeping/dunder names are deleted directly on the instance.
      2. Functions/methods/code-frame values found by static lookup are
         deleted directly on the instance.
      3. Data descriptors (non-class values exposing __delete__) are
         deleted on the instance, then the shared mapping entry (if any)
         is removed too.
      4. Otherwise the shared mapping entry is removed; if absent, fall
         back to a direct instance delete.
    """
    if name.startswith('__') or name in {'_tgeneralobject_imutablemapping_attributes', '_tgeneralobject_setable_data_descriptor_field_names'}:
        object.__delattr__(self, name)
        return

    # Static lookup avoids triggering the custom __getattribute__.
    has_value_static: bool = False
    value_static = None
    try:
        value_static = getattr_static(self, name)
        has_value_static = True
    except AttributeError:
        pass

    deleted: bool = False
    try:
        # BUGFIX (latent precedence defect): the original spelled this as
        # `has_value_static and isfunction(...) or ismethod(...) or ...`,
        # where `and` binds tighter than `or`, so the guard only applied to
        # the first predicate.  It was safe purely because value_static is
        # None whenever has_value_static is False; the parentheses make the
        # intended grouping explicit.
        if has_value_static and (isfunction(value_static) or ismethod(value_static) or isinstance(value_static, FrameType) or isinstance(value_static, CodeType) or ismethoddescriptor(value_static)):
            object.__delattr__(self, name)
            return
    except AttributeError:
        pass

    try:
        if has_value_static and (not isclass(value_static)) and hasattr(value_static, "__delete__"):
            object.__delattr__(self, name)
            deleted = True
    except AttributeError:
        pass

    try:
        del self._tgeneralobject_imutablemapping_attributes[name]
        return
    except KeyError:
        pass

    if not deleted:
        object.__delattr__(self, name)
4839
4840
def tgeneralobject_wrap_obj(obj, mapped_obj_dict: IMutableMapping, setable_data_descriptor_field_names: Set[str], init_mapped_obj_dict: bool):
    """Rebind *obj*'s attribute storage to a shared-memory mapping.

    Replaces obj's class with a dynamically created subclass whose
    __getattribute__/__setattr__/__delattr__ route ordinary data
    attributes through *mapped_obj_dict*.  When *init_mapped_obj_dict*
    is True, the object's current data attributes are copied into the
    mapping first (callables, code/frame objects, and non-data
    descriptors are excluded).
    """
    base = obj.__class__
    # These two bookkeeping attributes live on the instance itself; the
    # custom accessors special-case them.
    setattr(obj, '_tgeneralobject_imutablemapping_attributes', mapped_obj_dict)
    setattr(obj, '_tgeneralobject_setable_data_descriptor_field_names', setable_data_descriptor_field_names)
    if init_mapped_obj_dict:
        object_fields = set(dir(object))
        obj_fields = set(dir(obj)) - object_fields
        for key in obj_fields:
            # getattr_static avoids running descriptors/properties.
            value = getattr_static(obj, key)
            if key in {'_tgeneralobject_imutablemapping_attributes', '_tgeneralobject_setable_data_descriptor_field_names'} or key.startswith('__'):
                continue

            # Callables and code/frame objects stay on the class/instance.
            if isfunction(value) or ismethod(value) or isinstance(value, FrameType) or isinstance(value, CodeType) or ismethoddescriptor(value):
                continue

            # Skip non-data descriptors (have __get__ but neither
            # __set__ nor __delete__).
            if (not isclass(value)) and (hasattr(value, "__get__") and (not (hasattr(value, "__set__") or hasattr(value, "__delete__")))):
                continue

            if is_setable_data_descriptor(value):
                setable_data_descriptor_field_names.add(key)
            
            # Note: getattr (not getattr_static) so descriptor values are
            # materialized before being copied into shared memory.
            mapped_obj_dict[key] = getattr(obj, key)
    
    # Swap in a subclass whose attribute protocol goes through the
    # shared-memory mapping.
    NewClass = type(
        base.__name__ + 'WrappedByTGeneralObject',
        (base,),
        {
            '__getattribute__': tgeneralobject_custom_getattribute,
            '__setattr__': tgeneralobject_custom_setattr,
            '__delattr__': tgeneralobject_custom_delattr,
        }
    )
    obj.__class__ = NewClass
4874
4875
class TGeneralObject:
    """Codec for arbitrary Python objects stored in shared memory.

    Record body layout (8-byte pointers after the 16-byte service header):
        +0:  offset of the pickled original object (bytes)
        +8:  offset of the shared dict that backs the wrapped object's attributes
        +16: offset of the pickled set of setable data-descriptor field names
    """

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: Any) -> Tuple[Any, Offset, Size]:
        """Serialize ``obj`` into ``shared_memory``.

        Returns the wrapped (attribute-redirected) object, the record offset
        and the real allocated size. On any failure, the record and every
        sub-object created so far are released before the exception re-raises.
        """
        offset, real_size = shared_memory.malloc(ObjectType.tgeneralobject, 24)
        created_items_offsets: List[Offset] = list()
        try:
            # ForceGeneralObjectCopy/Inplace are thin markers that choose whether
            # the caller's object is mutated in place or a fresh copy is wrapped.
            make_changes_inplace: bool = True
            if isinstance(obj, ForceGeneralObjectCopy):
                obj = obj.obj
                make_changes_inplace = False
            elif isinstance(obj, ForceGeneralObjectInplace):
                obj = obj.obj
                make_changes_inplace = True

            dumped_obj: bytes = pickle_dumps(obj)
            dumped_mapped_obj_type, dumped_obj_offset, dumped_obj_type_size = shared_memory.put_obj(dumped_obj)
            created_items_offsets.append(dumped_obj_offset)
            mapped_obj_dict, obj_dict_offset, obj_dict_size = shared_memory.put_obj(dict())
            created_items_offsets.append(obj_dict_offset)
            
            write_uint64(shared_memory.base_address, offset + 16 + 0, dumped_obj_offset)
            write_uint64(shared_memory.base_address, offset + 16 + 8, obj_dict_offset)
            
            setable_data_descriptor_field_names: Set[str] = set()

            mapped_obj = None
            if make_changes_inplace:
                tgeneralobject_wrap_obj(obj, mapped_obj_dict, setable_data_descriptor_field_names, True)
                mapped_obj = obj
            else:
                # Wrap a fresh copy so the caller's original object stays untouched.
                mapped_obj = pickle_loads(dumped_obj)
                tgeneralobject_wrap_obj(mapped_obj, mapped_obj_dict, setable_data_descriptor_field_names, True)

            dumped_setable_data_descriptor_field_names: bytes = pickle_dumps(setable_data_descriptor_field_names)
            mapped_dumped_setable_data_descriptor_field_names, dumped_setable_data_descriptor_field_names_offset, dumped_setable_data_descriptor_field_names_size = shared_memory.put_obj(dumped_setable_data_descriptor_field_names)
            # Bugfix: register this sub-object for rollback too; previously it
            # leaked if the write below raised.
            created_items_offsets.append(dumped_setable_data_descriptor_field_names_offset)
            write_uint64(shared_memory.base_address, offset + 16 + 16, dumped_setable_data_descriptor_field_names_offset)
        except:
            # Roll back: release the record itself and every sub-object created so far.
            shared_memory.free(offset)
            for item_offset in created_items_offsets:
                shared_memory.destroy_obj(item_offset)
            
            raise

        return mapped_obj, offset, real_size
    
    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> Any:
        """Unpickle and wrap the object stored at ``offset``.

        Raises WrongObjectTypeError when the record is not a tgeneralobject.
        """
        if ObjectType.tgeneralobject != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
        dumped_obj: bytes = shared_memory.get_obj(dumped_obj_offset)
        
        obj_dict_offset = read_uint64(shared_memory.base_address, offset + 16 + 8)
        mapped_obj_dict = shared_memory.get_obj(obj_dict_offset)
        obj = pickle_loads(dumped_obj)
        
        dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + 16 + 16)
        dumped_setable_data_descriptor_field_names = shared_memory.get_obj(dumped_setable_data_descriptor_field_names_offset)
        setable_data_descriptor_field_names = pickle_loads(dumped_setable_data_descriptor_field_names)
        
        # init_mapped_attributes=False: attribute values already live in the
        # shared dict; only re-attach the wrapper machinery.
        tgeneralobject_wrap_obj(obj, mapped_obj_dict, setable_data_descriptor_field_names, False)
        return obj
    
    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Release the record at ``offset`` together with its sub-objects.

        Raises WrongObjectTypeError when the record is not a tgeneralobject.
        """
        if ObjectType.tgeneralobject != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
        shared_memory.destroy_obj(dumped_obj_offset)
        obj_dict_offset = read_uint64(shared_memory.base_address, offset + 16 + 8)
        # NOTE(review): only this pointer is guarded against a zero offset;
        # presumably the other pointers are always non-zero — confirm.
        if obj_dict_offset:
            shared_memory.destroy_obj(obj_dict_offset)
        
        dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + 16 + 16)
        shared_memory.destroy_obj(dumped_setable_data_descriptor_field_names_offset)
        shared_memory.free(offset)
    
    # def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
    #     if ObjectType.tgeneralobject != read_uint64(shared_memory.base_address, offset + 0):
    #         raise WrongObjectTypeError

    #     dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
    #     return shared_memory.get_obj_buffer(dumped_obj_offset)
    
    # def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
    #     if ObjectType.tgeneralobject != read_uint64(shared_memory.base_address, offset + 0):
    #         raise WrongObjectTypeError


    #     dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
    #     return shared_memory.get_obj_buffer_2(dumped_obj_offset)
4967
4968
4969# ======================================================================================================================
4970# === Static Object =============================================================================================================
4971
4972
class ForceStaticObjectCopy:
    """Marker wrapper: ask the static-object codec to map a copy of ``obj``
    instead of mutating the caller's original in place."""

    def __init__(self, obj: Any) -> None:
        self.obj = obj
4976
4977
# Convenience aliases (abbreviated and all-lowercase spellings).
FStaticObjectCopy = forcestaticobjectcopy = fstaticobjectcopy = ForceStaticObjectCopy
4981
4982
class ForceStaticObjectInplace:
    """Marker wrapper: ask the static-object codec to mutate ``obj`` in place
    rather than mapping a copy."""

    def __init__(self, obj: Any) -> None:
        self.obj = obj
4986
4987
# Convenience aliases (abbreviated and all-lowercase spellings).
FStaticObjectInplace = forcestaticobjectinplace = fstaticobjectinplace = ForceStaticObjectInplace
4991
4992
class StaticObjectOffsets(IntEnum):
    """Pointer-slot indices (in 8-byte words, relative to the record body)
    inside a TStaticObject shared-memory record.

    The numeric values define the persisted layout — do not renumber.
    """
    pickled_obj = 0                          # pickled source object (bytes)
    pickled_attributes_dict = 1              # pickled name -> slot-index dict
    attributes_slots = 2                     # shared list of attribute values
    setable_data_descriptor_field_names = 3  # pickled set of descriptor field names
4998
4999
def tstaticobject_custom_getattribute(self, name):
    """Attribute getter that redirects mapped fields into the shared slots list.

    Service attributes and dunder names resolve through the normal machinery;
    every other name is first looked up in the shared attributes dict/slots
    pair and falls back to regular resolution when not mapped there.
    """
    direct = object.__getattribute__
    if name.startswith('__') or name in {'_tstaticobject_attributes_dict', '_tstaticobject_attributes_slots', '_tstaticobject_setable_data_descriptor_field_names'}:
        return direct(self, name)

    try:
        # KeyError here means the name is not mapped into shared storage.
        return self._tstaticobject_attributes_slots[self._tstaticobject_attributes_dict[name]]
    except KeyError:
        return direct(self, name)
5010
5011
def tstaticobject_custom_setattr(self, name, value):
    """Attribute setter that writes mapped fields into the shared slots list.

    Service attributes and dunder names — as well as functions, methods,
    frames, code objects and method descriptors — are stored directly on the
    instance; every other mapped name is written into the shared slots.
    """
    if name.startswith('__') or name in {'_tstaticobject_attributes_dict', '_tstaticobject_attributes_slots', '_tstaticobject_setable_data_descriptor_field_names'}:
        object.__setattr__(self, name, value)
        return

    if (isfunction(value) or ismethod(value) or ismethoddescriptor(value)
            or isinstance(value, (FrameType, CodeType))):
        object.__setattr__(self, name, value)
        return

    try:
        # KeyError here means the name is not mapped into shared storage.
        self._tstaticobject_attributes_slots[self._tstaticobject_attributes_dict[name]] = value
    except KeyError:
        object.__setattr__(self, name, value)
5033
5034
def tstaticobject_custom_delattr(self, name):
    """Attribute deleter; fields mapped into shared storage cannot be removed."""
    is_service_name = name.startswith('__') or name in {'_tstaticobject_attributes_dict', '_tstaticobject_attributes_slots', '_tstaticobject_setable_data_descriptor_field_names'}
    if (not is_service_name) and (name in self._tstaticobject_attributes_dict):
        raise AttributeError(f"'{type(self).__name__}' object attribute '{name}' is read-only")

    object.__delattr__(self, name)
5043
5044
def tstaticobject_wrap_obj(obj, attributes_dict: Dict, attributes_slots: IList, setable_data_descriptor_field_names: Set[str], init_mapped_attributes: bool):
    """Attach shared-memory attribute storage to ``obj`` in place.

    Binds the shared ``attributes_dict`` (name -> slot index) and
    ``attributes_slots`` (shared value list) to the object, optionally copies
    the object's current data attributes into those slots, and finally swaps
    the object's class for a dynamically created subclass whose
    ``__getattribute__``/``__setattr__``/``__delattr__`` route mapped names
    through the shared storage.

    NOTE: the plain ``setattr`` calls below intentionally run BEFORE the class
    swap at the end, so they go through the normal attribute machinery rather
    than the custom hooks installed afterwards.
    """
    base = obj.__class__
    setattr(obj, '_tstaticobject_attributes_dict', attributes_dict)
    setattr(obj, '_tstaticobject_attributes_slots', attributes_slots)
    setattr(obj, '_tstaticobject_setable_data_descriptor_field_names', setable_data_descriptor_field_names)
    if init_mapped_attributes:
        # Candidate names: everything the object exposes beyond plain ``object``.
        object_fields = set(dir(object))
        obj_fields = set(dir(obj)) - object_fields
        good_fields: List[Hashable] = list()
        for key in obj_fields:
            value = getattr_static(obj, key)  # raw lookup: descriptors are not triggered
            # Skip service attributes and dunder names.
            if key in {'_tstaticobject_attributes_dict', '_tstaticobject_attributes_slots', '_tstaticobject_setable_data_descriptor_field_names'} or key.startswith('__'):
                continue

            # Skip functions/methods/frames/code objects — they stay on the instance.
            if isfunction(value) or ismethod(value) or isinstance(value, FrameType) or isinstance(value, CodeType) or ismethoddescriptor(value):
                continue

            # Skip get-only (non-data) descriptors that are not classes.
            if (not isclass(value)) and (hasattr(value, "__get__") and (not (hasattr(value, "__set__") or hasattr(value, "__delete__")))):
                continue
            
            if is_setable_data_descriptor(value):
                setable_data_descriptor_field_names.add(key)
            
            good_fields.append(key)
        
        # Reserve one shared slot per mapped field and copy the current values in.
        good_fields_len = len(good_fields)
        attributes_slots.set_capacity(good_fields_len)
        attributes_slots.extend_with(good_fields_len, 0)
        for index, key in enumerate(good_fields):
            attributes_dict[key] = index
            value = getattr(obj, key)  # normal lookup: descriptors ARE triggered here
            attributes_slots[index] = value
    
    # Swap in a dynamically built subclass carrying the custom attribute hooks.
    NewClass = type(
        base.__name__ + 'WrappedByTStaticObject',
        (base,),
        {
            '__getattribute__': tstaticobject_custom_getattribute,
            '__setattr__': tstaticobject_custom_setattr,
            '__delattr__': tstaticobject_custom_delattr,
        }
    )
    obj.__class__ = NewClass
5088
5089
class TStaticObject:
    """Codec for objects whose attribute set is fixed ("static") at map time.

    Record body layout: four 8-byte pointers indexed by ``StaticObjectOffsets``
    (pickled_obj, pickled_attributes_dict, attributes_slots,
    setable_data_descriptor_field_names).
    """

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: Any) -> Tuple[Any, Offset, Size]:
        """Serialize ``obj`` into shared memory.

        Returns the wrapped object, the record offset and the real allocated
        size. On any failure, the record and every sub-object created so far
        are released before the exception re-raises.
        """
        # Use the declared layout enum instead of the previous hard-coded
        # 32-byte size and 0/8/16/24 offsets (values are identical).
        offset, real_size = shared_memory.malloc(ObjectType.tstaticobject, 8 * len(StaticObjectOffsets))
        created_items_offsets: List[Offset] = list()
        try:
            make_changes_inplace: bool = True
            if isinstance(obj, ForceStaticObjectCopy):
                obj = obj.obj
                make_changes_inplace = False
            elif isinstance(obj, ForceStaticObjectInplace):
                obj = obj.obj
                make_changes_inplace = True

            dumped_obj: bytes = pickle_dumps(obj)
            dumped_mapped_obj, dumped_obj_offset, dumped_obj_size = shared_memory.put_obj(dumped_obj)
            created_items_offsets.append(dumped_obj_offset)
            write_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectOffsets.pickled_obj, dumped_obj_offset)

            attributes_dict: Dict = dict()

            attributes_slots, attributes_slots_offset, attributes_slots_size = shared_memory.put_obj(list())
            created_items_offsets.append(attributes_slots_offset)
            write_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectOffsets.attributes_slots, attributes_slots_offset)
            
            setable_data_descriptor_field_names: Set[str] = set()

            mapped_obj = None
            if make_changes_inplace:
                tstaticobject_wrap_obj(obj, attributes_dict, attributes_slots, setable_data_descriptor_field_names, True)
                mapped_obj = obj
            else:
                # Wrap a fresh copy so the caller's original object stays untouched.
                mapped_obj = pickle_loads(dumped_obj)
                tstaticobject_wrap_obj(mapped_obj, attributes_dict, attributes_slots, setable_data_descriptor_field_names, True)
            
            dumped_attributes_dict: bytes = pickle_dumps(attributes_dict)
            dumped_mapped_attributes_dict, dumped_attributes_dict_offset, dumped_attributes_dict_size = shared_memory.put_obj(dumped_attributes_dict)
            # Bugfix: register for rollback; previously leaked if a later step raised.
            created_items_offsets.append(dumped_attributes_dict_offset)
            write_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectOffsets.pickled_attributes_dict, dumped_attributes_dict_offset)
            
            dumped_setable_data_descriptor_field_names: bytes = pickle_dumps(setable_data_descriptor_field_names)
            mapped_dumped_setable_data_descriptor_field_names, dumped_setable_data_descriptor_field_names_offset, dumped_setable_data_descriptor_field_names_size = shared_memory.put_obj(dumped_setable_data_descriptor_field_names)
            # Bugfix: register for rollback; previously leaked if the write raised.
            created_items_offsets.append(dumped_setable_data_descriptor_field_names_offset)
            write_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectOffsets.setable_data_descriptor_field_names, dumped_setable_data_descriptor_field_names_offset)
        except:
            shared_memory.free(offset)
            for item_offset in created_items_offsets:
                shared_memory.destroy_obj(item_offset)
            
            raise
        
        return mapped_obj, offset, real_size
    
    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> Any:
        """Reconstruct and wrap the object stored at ``offset``.

        Raises WrongObjectTypeError when the record is not a tstaticobject.
        """
        if ObjectType.tstaticobject != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectOffsets.pickled_obj)
        dumped_obj: bytes = shared_memory.get_obj(dumped_obj_offset)
        obj = pickle_loads(dumped_obj)

        attributes_slots_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectOffsets.attributes_slots)
        attributes_slots: IList = shared_memory.get_obj(attributes_slots_offset)

        dumped_attributes_dict_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectOffsets.pickled_attributes_dict)
        dumped_attributes_dict = shared_memory.get_obj(dumped_attributes_dict_offset)
        attributes_dict = pickle_loads(dumped_attributes_dict)

        dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectOffsets.setable_data_descriptor_field_names)
        dumped_setable_data_descriptor_field_names = shared_memory.get_obj(dumped_setable_data_descriptor_field_names_offset)
        setable_data_descriptor_field_names = pickle_loads(dumped_setable_data_descriptor_field_names)

        # init_mapped_attributes=False: values already live in the shared slots.
        tstaticobject_wrap_obj(obj, attributes_dict, attributes_slots, setable_data_descriptor_field_names, False)
        return obj
    
    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Release the record at ``offset`` together with all its sub-objects.

        Raises WrongObjectTypeError when the record is not a tstaticobject.
        """
        if ObjectType.tstaticobject != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectOffsets.pickled_obj)
        shared_memory.destroy_obj(dumped_obj_offset)
        attributes_slots_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectOffsets.attributes_slots)
        shared_memory.destroy_obj(attributes_slots_offset)
        dumped_attributes_dict_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectOffsets.pickled_attributes_dict)
        shared_memory.destroy_obj(dumped_attributes_dict_offset)
        dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectOffsets.setable_data_descriptor_field_names)
        shared_memory.destroy_obj(dumped_setable_data_descriptor_field_names_offset)
        shared_memory.free(offset)
    
    # def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
    #     if ObjectType.tstaticobject != read_uint64(shared_memory.base_address, offset + 0):
    #         raise WrongObjectTypeError

    #     dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
    #     return shared_memory.get_obj_buffer(dumped_obj_offset)
    
    # def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
    #     if ObjectType.tstaticobject != read_uint64(shared_memory.base_address, offset + 0):
    #         raise WrongObjectTypeError


    #     dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
    #     return shared_memory.get_obj_buffer_2(dumped_obj_offset)
5191
5192
5193# ======================================================================================================================
5194# === Static Object With Slots =============================================================================================================
5195
5196
class StaticObjectWithSlotsOffsets(IntEnum):
    """Pointer-slot indices (in 8-byte words, relative to the record body)
    inside a TStaticObjectWithSlots shared-memory record.

    The numeric values define the persisted layout — do not renumber.
    """
    pickled_obj = 0                          # pickled source object (bytes)
    pickled_attributes_dict = 1              # pickled name -> slot-index dict
    attributes_slots = 2                     # shared list of attribute values
    setable_data_descriptor_field_names = 3  # pickled set of descriptor field names
5202
5203
def tstaticobjectwithslots_custom_getattribute(self, name):
    """Attribute getter that redirects mapped fields into the shared slots list.

    Service attributes and dunder names resolve through the normal machinery;
    every other name is first looked up in the shared attributes dict/slots
    pair and falls back to regular resolution when not mapped there.
    """
    direct = object.__getattribute__
    if name.startswith('__') or name in {'_tstaticobjectwithslots_attributes_dict', '_tstaticobjectwithslots_attributes_slots', '_tstaticobjectwithslots_setable_data_descriptor_field_names'}:
        return direct(self, name)

    try:
        # KeyError here means the name is not mapped into shared storage.
        return self._tstaticobjectwithslots_attributes_slots[self._tstaticobjectwithslots_attributes_dict[name]]
    except KeyError:
        return direct(self, name)
5214
5215
def tstaticobjectwithslots_custom_setattr(self, name, value):
    """Attribute setter that writes mapped fields into the shared slots list.

    Service attributes and dunder names — as well as functions, methods,
    frames, code objects and method descriptors — are stored directly on the
    instance; every other mapped name is written into the shared slots.
    """
    if name.startswith('__') or name in {'_tstaticobjectwithslots_attributes_dict', '_tstaticobjectwithslots_attributes_slots', '_tstaticobjectwithslots_setable_data_descriptor_field_names'}:
        object.__setattr__(self, name, value)
        return

    if (isfunction(value) or ismethod(value) or ismethoddescriptor(value)
            or isinstance(value, (FrameType, CodeType))):
        object.__setattr__(self, name, value)
        return

    try:
        # KeyError here means the name is not mapped into shared storage.
        self._tstaticobjectwithslots_attributes_slots[self._tstaticobjectwithslots_attributes_dict[name]] = value
    except KeyError:
        object.__setattr__(self, name, value)
5237
5238
def tstaticobjectwithslots_custom_delattr(self, name):
    """Attribute deleter; fields mapped into shared storage cannot be removed."""
    is_service_name = name.startswith('__') or name in {'_tstaticobjectwithslots_attributes_dict', '_tstaticobjectwithslots_attributes_slots', '_tstaticobjectwithslots_setable_data_descriptor_field_names'}
    if (not is_service_name) and (name in self._tstaticobjectwithslots_attributes_dict):
        raise AttributeError(f"'{type(self).__name__}' object attribute '{name}' is read-only")

    object.__delattr__(self, name)
5247
5248
def tstaticobjectwithslots_custom_init(self, original, good_fields, attributes_dict, attributes_slots, setable_data_descriptor_field_names):
    """``__init__`` for the dynamically built wrapper class: attach the shared
    storage triple, then copy every mapped attribute over from ``original``."""
    service_attributes = (
        ('_tstaticobjectwithslots_attributes_dict', attributes_dict),
        ('_tstaticobjectwithslots_attributes_slots', attributes_slots),
        ('_tstaticobjectwithslots_setable_data_descriptor_field_names', setable_data_descriptor_field_names),
    )
    for service_name, service_value in service_attributes:
        setattr(self, service_name, service_value)

    for attr_name in good_fields:
        setattr(self, attr_name, getattr(original, attr_name))
5255
5256
def tstaticobjectwithslots_custom_eq(self, other):
    """``__eq__`` for the wrapper class: ``other`` must be an instance of the
    wrapper type or of the wrapped base class and agree on every mapped
    attribute; otherwise comparison is NotImplemented."""
    parent_class = self.__class__.__bases__[0]
    if not isinstance(other, (type(self), parent_class)):
        return NotImplemented

    return all(
        hasattr(other, key) and getattr(self, key) == getattr(other, key)
        for key in self._tstaticobjectwithslots_attributes_dict.keys()
    )
5270
5271
def tstaticobjectwithslots_wrap_obj(obj, attributes_dict: Dict, attributes_slots: IList, setable_data_descriptor_field_names: Set[str], init_mapped_attributes: bool) -> Any:
    """Build and return a NEW wrapper object whose mapped attributes live in
    shared-memory storage.

    Unlike ``tstaticobject_wrap_obj`` this does not mutate ``obj`` in place:
    it creates a subclass of ``obj``'s class (with custom attribute hooks, a
    custom ``__init__`` and ``__eq__``) and instantiates it, copying the
    mapped attribute values from ``obj``.
    """
    base = obj.__class__

    good_fields: List[Hashable] = list()
    if init_mapped_attributes:
        # Prefer the class's declared __slots__; otherwise map every name the
        # object exposes beyond plain ``object``.
        if hasattr(base, '__slots__'):
            obj_fields = base.__slots__
        else:
            object_fields = set(dir(object))
            obj_fields = set(dir(obj)) - object_fields

        for key in obj_fields:
            value = getattr_static(obj, key)  # raw lookup: descriptors are not triggered
            # Skip service attributes and dunder names.
            if key in {'_tstaticobjectwithslots_attributes_dict', '_tstaticobjectwithslots_attributes_slots', '_tstaticobjectwithslots_setable_data_descriptor_field_names'} or key.startswith('__'):
                continue

            # Skip functions/methods/frames/code objects — they stay on the instance.
            if isfunction(value) or ismethod(value) or isinstance(value, FrameType) or isinstance(value, CodeType) or ismethoddescriptor(value):
                continue

            # Skip get-only (non-data) descriptors that are not classes.
            if (not isclass(value)) and (hasattr(value, "__get__") and (not (hasattr(value, "__set__") or hasattr(value, "__delete__")))):
                continue
            
            if is_setable_data_descriptor(value):
                setable_data_descriptor_field_names.add(key)
            
            good_fields.append(key)
        
        # Reserve one shared slot per mapped field and copy the current values in.
        good_fields_len = len(good_fields)
        attributes_slots.set_capacity(good_fields_len)
        attributes_slots.extend_with(good_fields_len, 0)
        for index, key in enumerate(good_fields):
            attributes_dict[key] = index
            value = getattr(obj, key)  # normal lookup: descriptors ARE triggered here
            attributes_slots[index] = value
    
    # Dynamically built wrapper class. '__dict__' in __slots__ lets instances
    # still carry the service attributes assigned by the custom __init__.
    NewClass = type(
        base.__name__ + 'WrappedByTStaticObjectWithSlots',
        (base,),
        {
            '__slots__': ['__dict__'],
            '__init__': tstaticobjectwithslots_custom_init,
            '__eq__': tstaticobjectwithslots_custom_eq,
            '__getattribute__': tstaticobjectwithslots_custom_getattribute,
            '__setattr__': tstaticobjectwithslots_custom_setattr,
            '__delattr__': tstaticobjectwithslots_custom_delattr,
        }
    )

    new_obj = NewClass(obj, good_fields, attributes_dict, attributes_slots, setable_data_descriptor_field_names)
    
    return new_obj
5323
5324
class TStaticObjectWithSlots:
    """Codec for objects mapped through a dynamically built ``__slots__``-based
    wrapper class (always wraps a copy; never mutates the caller's object).

    Record body layout: four 8-byte pointers indexed by
    ``StaticObjectWithSlotsOffsets``.
    """

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: Any) -> Tuple[Any, Offset, Size]:
        """Serialize ``obj`` into shared memory.

        Returns the wrapper object, the record offset and the real allocated
        size. On any failure, the record and every sub-object created so far
        are released before the exception re-raises.
        """
        offset, real_size = shared_memory.malloc(ObjectType.tstaticobjectwithslots, 8 * len(StaticObjectWithSlotsOffsets))
        created_items_offsets: List[Offset] = list()
        try:
            dumped_obj: bytes = pickle_dumps(obj)
            dumped_mapped_obj, dumped_obj_offset, dumped_obj_size = shared_memory.put_obj(dumped_obj)
            created_items_offsets.append(dumped_obj_offset)
            write_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.pickled_obj, dumped_obj_offset)

            attributes_dict: Dict = dict()

            attributes_slots, attributes_slots_offset, attributes_slots_size = shared_memory.put_obj(list())
            created_items_offsets.append(attributes_slots_offset)
            write_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.attributes_slots, attributes_slots_offset)
            
            setable_data_descriptor_field_names: Set[str] = set()

            # This codec always wraps a fresh unpickled copy of the object.
            mapped_obj = None
            loaded_obj = pickle_loads(dumped_obj)
            mapped_obj = tstaticobjectwithslots_wrap_obj(loaded_obj, attributes_dict, attributes_slots, setable_data_descriptor_field_names, True)
            
            dumped_attributes_dict: bytes = pickle_dumps(attributes_dict)
            dumped_mapped_attributes_dict, dumped_attributes_dict_offset, dumped_attributes_dict_size = shared_memory.put_obj(dumped_attributes_dict)
            # Bugfix: register for rollback; previously leaked if a later step raised.
            created_items_offsets.append(dumped_attributes_dict_offset)
            write_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.pickled_attributes_dict, dumped_attributes_dict_offset)
            
            dumped_setable_data_descriptor_field_names: bytes = pickle_dumps(setable_data_descriptor_field_names)
            mapped_dumped_setable_data_descriptor_field_names, dumped_setable_data_descriptor_field_names_offset, dumped_setable_data_descriptor_field_names_size = shared_memory.put_obj(dumped_setable_data_descriptor_field_names)
            # Bugfix: register for rollback; previously leaked if the write raised.
            created_items_offsets.append(dumped_setable_data_descriptor_field_names_offset)
            write_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.setable_data_descriptor_field_names, dumped_setable_data_descriptor_field_names_offset)
        except:
            shared_memory.free(offset)
            for item_offset in created_items_offsets:
                shared_memory.destroy_obj(item_offset)
            
            raise
        
        return mapped_obj, offset, real_size
    
    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> Any:
        """Reconstruct and wrap the object stored at ``offset``.

        Raises WrongObjectTypeError when the record is not a tstaticobjectwithslots.
        """
        if ObjectType.tstaticobjectwithslots != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.pickled_obj)
        dumped_obj: bytes = shared_memory.get_obj(dumped_obj_offset)
        obj = pickle_loads(dumped_obj)

        attributes_slots_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.attributes_slots)
        attributes_slots: IList = shared_memory.get_obj(attributes_slots_offset)

        dumped_attributes_dict_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.pickled_attributes_dict)
        dumped_attributes_dict = shared_memory.get_obj(dumped_attributes_dict_offset)
        attributes_dict = pickle_loads(dumped_attributes_dict)

        dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.setable_data_descriptor_field_names)
        dumped_setable_data_descriptor_field_names = shared_memory.get_obj(dumped_setable_data_descriptor_field_names_offset)
        setable_data_descriptor_field_names = pickle_loads(dumped_setable_data_descriptor_field_names)

        # init_mapped_attributes=False: values already live in the shared slots.
        mapped_obj = tstaticobjectwithslots_wrap_obj(obj, attributes_dict, attributes_slots, setable_data_descriptor_field_names, False)
        return mapped_obj
    
    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Release the record at ``offset`` together with all its sub-objects.

        Raises WrongObjectTypeError when the record is not a tstaticobjectwithslots.
        """
        if ObjectType.tstaticobjectwithslots != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.pickled_obj)
        shared_memory.destroy_obj(dumped_obj_offset)
        attributes_slots_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.attributes_slots)
        shared_memory.destroy_obj(attributes_slots_offset)
        dumped_attributes_dict_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.pickled_attributes_dict)
        shared_memory.destroy_obj(dumped_attributes_dict_offset)
        dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.setable_data_descriptor_field_names)
        shared_memory.destroy_obj(dumped_setable_data_descriptor_field_names_offset)
        shared_memory.free(offset)
    
    # def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
    #     if ObjectType.tstaticobjectwithslots != read_uint64(shared_memory.base_address, offset + 0):
    #         raise WrongObjectTypeError

    #     dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.pickled_obj)
    #     return shared_memory.get_obj_buffer(dumped_obj_offset)
    
    # def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
    #     if ObjectType.tstaticobjectwithslots != read_uint64(shared_memory.base_address, offset + 0):
    #         raise WrongObjectTypeError


    #     dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.pickled_obj)
    #     return shared_memory.get_obj_buffer_2(dumped_obj_offset)
5413
5414
5415# ======================================================================================================================
5416# === Numpy ndarray =============================================================================================================
5417
5418
class TNumpyNdarrayOffsets(IntEnum):
    """Pointer-slot indices (in 8-byte words, relative to the record body)
    inside a TNumpyNdarray shared-memory record.

    The numeric values define the persisted layout — do not renumber.
    """
    data_buffer_offset = 0       # raw ndarray bytes stored as a shared object
    shape_tuple_offset = 1       # the shape tuple object
    pickled_datatype_offset = 2  # pickled numpy dtype
5423
5424
class TNumpyNdarray:
    """Codec for ``np.ndarray`` objects stored in shared memory.

    Record body layout: three 8-byte pointers indexed by ``TNumpyNdarrayOffsets``
    (data buffer, shape tuple, pickled dtype).
    """

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', nparray: np.ndarray) -> Tuple[np.ndarray, Offset, Size]:
        """Copy ``nparray``'s data into shared memory.

        Returns an ndarray built over the shared buffer, the record offset and
        the real allocated size. On any failure, the record and every
        sub-object created so far are released before the exception re-raises.
        """
        shape = tuple(nparray.shape)
        data_type = nparray.dtype
        pickled_data_type = pickle_dumps(data_type)
        data_buffer: bytes = nparray.tobytes()
        # Use the declared layout enum instead of the previous hard-coded
        # 24-byte size and 0/8/16 offsets (values are identical).
        offset, real_size = shared_memory.malloc(ObjectType.tnumpyndarray, 8 * len(TNumpyNdarrayOffsets))
        created_items_offsets: List[Offset] = list()
        try:
            data_buffer_mapped_obj, data_buffer_offset, data_buffer_size = shared_memory.put_obj(data_buffer)
            created_items_offsets.append(data_buffer_offset)
            shape_mapped_obj, shape_offset, shape_size = shared_memory.put_obj(shape)
            created_items_offsets.append(shape_offset)
            pickled_data_type_mapped_obj, pickled_data_type_offset, pickled_data_type_size = shared_memory.put_obj(pickled_data_type)
            # Bugfix: register for rollback; previously leaked if a later step raised.
            created_items_offsets.append(pickled_data_type_offset)
            write_uint64(shared_memory.base_address, offset + 16 + 8 * TNumpyNdarrayOffsets.data_buffer_offset, data_buffer_offset)
            write_uint64(shared_memory.base_address, offset + 16 + 8 * TNumpyNdarrayOffsets.shape_tuple_offset, shape_offset)
            write_uint64(shared_memory.base_address, offset + 16 + 8 * TNumpyNdarrayOffsets.pickled_datatype_offset, pickled_data_type_offset)
            mapped_nparray: np.ndarray = make_numpy_array_from_obj_offset(shared_memory, data_buffer_offset, shape, data_type)
        except:
            shared_memory.free(offset)
            for item_offset in created_items_offsets:
                shared_memory.destroy_obj(item_offset)
            
            raise

        return mapped_nparray, offset, real_size
    
    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> np.ndarray:
        """Rebuild an ndarray over the shared buffer stored at ``offset``.

        Raises WrongObjectTypeError when the record is not a tnumpyndarray.
        (Return annotation fixed: this returns an ndarray, not a dict.)
        """
        if ObjectType.tnumpyndarray != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        data_buffer_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * TNumpyNdarrayOffsets.data_buffer_offset)
        shape_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * TNumpyNdarrayOffsets.shape_tuple_offset)
        pickled_data_type_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * TNumpyNdarrayOffsets.pickled_datatype_offset)
        shape = shared_memory.get_obj(shape_offset)
        pickled_data_type = shared_memory.get_obj(pickled_data_type_offset)
        data_type = pickle_loads(pickled_data_type)
        mapped_nparray: np.ndarray = make_numpy_array_from_obj_offset(shared_memory, data_buffer_offset, shape, data_type)
        return mapped_nparray
    
    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Release the record at ``offset`` together with all its sub-objects.

        Raises WrongObjectTypeError when the record is not a tnumpyndarray.
        """
        if ObjectType.tnumpyndarray != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        data_buffer_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * TNumpyNdarrayOffsets.data_buffer_offset)
        shape_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * TNumpyNdarrayOffsets.shape_tuple_offset)
        pickled_data_type_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * TNumpyNdarrayOffsets.pickled_datatype_offset)
        shared_memory.destroy_obj(data_buffer_offset)
        shared_memory.destroy_obj(shape_offset)
        shared_memory.destroy_obj(pickled_data_type_offset)
        shared_memory.free(offset)
    
    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
        """Zero-copy view over the shared data buffer of the record at ``offset``."""
        if ObjectType.tnumpyndarray != read_uint64(shared_memory.base_address, offset + 0):
            raise WrongObjectTypeError

        data_buffer_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * TNumpyNdarrayOffsets.data_buffer_offset)
        return shared_memory.get_obj_buffer(data_buffer_offset)
    
    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
        """Raw (address, size)-style handle for the shared data buffer of the
        record at ``offset`` (see ``SharedMemory.get_obj_buffer_2``)."""
        if ObjectType.tnumpyndarray != read_uint64(shared_memory.base_address, offset + 0):
            raise WrongObjectTypeError

        data_buffer_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * TNumpyNdarrayOffsets.data_buffer_offset)
        return shared_memory.get_obj_buffer_2(data_buffer_offset)
5491
5492
5493# ======================================================================================================================
5494# === Numpy ndarray =============================================================================================================
5495
5496
class TTorchTensorOffsets(IntEnum):
    """Field indices (in 8-byte slots) of a TTorchTensor shared-memory record."""
    # Offset of the child numpy-ndarray object that backs the tensor
    numpy_ndarray_offset = 0
5499
5500
class TTorchTensor:
    """Codec that stores a ``torch.Tensor`` in shared memory by delegating to the
    numpy-ndarray codec: the tensor is converted with ``tensor.numpy()`` and
    mapped back with ``torch.from_numpy`` (which shares the ndarray's memory).
    """

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', tensor: Tensor) -> Tuple[Tensor, Offset, Size]:
        """Copy `tensor` into shared memory.

        Returns:
            (mapped tensor backed by shared memory, record offset, allocated size).

        On failure the record and any child objects created so far are released
        before the exception is re-raised.

        NOTE(review): ``tensor.numpy()`` requires a CPU tensor that does not
        require grad — confirm callers guarantee this.
        """
        offset, real_size = shared_memory.malloc(ObjectType.ttorchtensor, 8)
        created_items_offsets: List[Offset] = list()
        try:
            numpy_ndarray_mapped_obj, numpy_ndarray_offset, numpy_ndarray_size = shared_memory.put_obj(tensor.numpy())
            created_items_offsets.append(numpy_ndarray_offset)
            # Single field at +16: offset of the child numpy-ndarray object
            write_uint64(shared_memory.base_address, offset + 16 + 0, numpy_ndarray_offset)
            mapped_torch_tensor: Tensor = from_numpy(numpy_ndarray_mapped_obj)
        except BaseException:  # was a bare `except:`; identical semantics, made explicit
            # Roll back: free the record first, then every child created so far.
            # (A stray `self._offset = None` copy-paste line was removed here —
            # this class never reads `_offset`.)
            shared_memory.free(offset)
            for item_offset in created_items_offsets:
                shared_memory.destroy_obj(item_offset)
            
            raise
        return mapped_torch_tensor, offset, real_size
    
    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> Tensor:
        """Reconstruct a tensor view over an existing shared-memory record.

        Fix: was annotated ``-> dict`` although it returns a ``Tensor``.
        """
        if ObjectType.ttorchtensor != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        numpy_ndarray_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
        numpy_ndarray_mapped_obj: np.ndarray = shared_memory.get_obj(numpy_ndarray_offset)
        mapped_torch_tensor: Tensor = from_numpy(numpy_ndarray_mapped_obj)
        return mapped_torch_tensor
    
    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Destroy the child ndarray object and free the record itself."""
        if ObjectType.ttorchtensor != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        numpy_ndarray_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
        shared_memory.destroy_obj(numpy_ndarray_offset)
        shared_memory.free(offset)
    
    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
        """Return a memoryview over the underlying ndarray child object."""
        if ObjectType.ttorchtensor != read_uint64(shared_memory.base_address, offset + 0):
            raise WrongObjectTypeError

        numpy_ndarray_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
        return shared_memory.get_obj_buffer(numpy_ndarray_offset)
    
    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
        """Return the two-integer buffer descriptor of the underlying ndarray
        child object (semantics defined by ``SharedMemory.get_obj_buffer_2``)."""
        if ObjectType.ttorchtensor != read_uint64(shared_memory.base_address, offset + 0):
            raise WrongObjectTypeError

        numpy_ndarray_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
        return shared_memory.get_obj_buffer_2(numpy_ndarray_offset)
5550
5551
5552# ======================================================================================================================
5553# === Types and Codecs ==========================================================================================================
5554
5555
5556# Add your own codecs to `codec_by_type`
# Maps each ObjectType tag to the codec instance that serializes /
# deserializes that type into shared memory.
codec_by_type: Dict[ObjectType, TBase] = {
    ObjectType.tnone: TNone(),
    ObjectType.tint: TInt(),
    ObjectType.tbool: TBool(),
    ObjectType.tfloat: TFloat(),
    ObjectType.tcomplex: TComplex(),
    ObjectType.tdecimal: TDecimal(),
    ObjectType.tdatetime: TDatetime(),
    ObjectType.tslice: TSlice(),
    ObjectType.tbytes: TBytes(),
    ObjectType.tbytearray: TBytearray(),
    ObjectType.tstr: TStr(),
    ObjectType.tlist: TList(),
    ObjectType.ttuple: TTuple(),
    ObjectType.tmutableset: TMutableSet(),
    ObjectType.tset: TSet(),
    ObjectType.tmutablemapping: TMutableMapping(),
    ObjectType.tmapping: TMapping(),
    ObjectType.tfastset: TFastSet(),
    ObjectType.tfastdict: TFastDict(),
    ObjectType.tsmallint: TSmallInt(),
    ObjectType.tbigint: TBigInt(),
    ObjectType.tgeneralobject: TGeneralObject(),
    # NOTE(review): tpickable maps to the same TGeneralObject codec as
    # tgeneralobject — presumably intentional aliasing; confirm.
    ObjectType.tpickable: TGeneralObject(),
    ObjectType.tstaticobject: TStaticObject(),
    ObjectType.tstaticobjectwithslots: TStaticObjectWithSlots(),
    ObjectType.tnumpyndarray: TNumpyNdarray(),
    ObjectType.ttorchtensor: TTorchTensor(),
}
5586
5587# Add your own types to `obj_type_map`
# Extension point: maps a Python type to the ObjectType tag that should encode
# it; intentionally empty by default.
obj_type_map: Dict[Type, ObjectType] = {
}
5590
5591
5592# ======================================================================================================================
5593# === Message ==========================================================================================================
5594
5595
class MessageOffsets(IntEnum):
    """Field indices (in 8-byte slots) of a queue message record."""
    # Offset of the previous message in the queue's doubly linked structure
    previous_message_offset = 0
    # Offset of the next message
    next_message_offset = 1
    # Offset of the payload object
    item_offset = 2
5600
5601
5602class SharedMemory:
    def __init__(self, name: str, create: bool = False, size: Optional[int] = None, queue_type: QueueType = QueueType.fifo, zero_mem: bool = True, 
                 consumer_id: Optional[int] = None, creator_destroy_timeout: float = 5.0, unlink_old: bool = True):
        """Create (creator side) or prepare to attach to (consumer side) a named
        shared-memory block.

        Args:
            name: system-wide name of the shared-memory block.
            create: True for the creator process; consumers pass False and must
                later call ``init_consumer()`` / ``ainit_consumer()``.
            size: total block size in bytes. None/0 means "system header only"
                for the creator, or "read the real size from the creator" for a
                consumer.
            queue_type: message queue discipline (stored on the instance).
            zero_mem: if True the creator zeroes the data area before use.
            consumer_id: optional identifier of this consumer.
            creator_destroy_timeout: how long the creator waits for the consumer
                to close during ``proper_close()``.
            unlink_old: if True the creator first unlinks a stale segment with
                the same name left over from a crashed previous run.
        """
        global current_shared_memory_instance
        current_shared_memory_instance = self
        self._initiated: bool = False
        self._consumer_id: Optional[int] = consumer_id
        self._creator_destroy_timeout: float = creator_destroy_timeout
        self.offset_to_be_monitored: Optional[Offset] = None
        self._malloc_time: float = 0.0
        self._realloc_time: float = 0.0
        self._name: str = name
        self._create: bool = create
        self._queue_type: QueueType = queue_type
        self._zero_mem: bool = zero_mem
        self._last_message_offset: Optional[Offset] = None
        # NOTE(review): annotation says Coroutine but this stores an async
        # function (called and awaited later) — consider Callable.
        self._asleep_func: Coroutine = self._default_asleep_func

        # The system header is an array of 13 uint64 slots at the start of the block
        sys_arr_length = 13
        self.global_sys_array_len: int = sys_arr_length
        arr_byte_size = sys_arr_length * 8
        self.global_sys_area_size: int = arr_byte_size

        self._size: Optional[int] = size or None
        if (size is None) or (0 == size):
            size = self.global_sys_area_size
            if self._create:
                self._size = size
        
        if self._create:
            if unlink_old:
                SharedMemory.unlink_by_name(name)
            
            self._shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=name, create=create, size=size)
            self._init_post_mem()
            
            # System header slot layout (uint64 each; offsets match the accessor
            # methods of this class):
            #   +0  total block size            +8  data start offset
            #   +16 data area size              +24 data end offset
            #   +32 free-memory search start    +40 first message offset
            #   +48 last message offset         +56 creator in charge
            #   +64 consumer in charge          +72 creator wants charge
            #   +80 consumer wants charge       +88 creator ready flag
            #   +96 consumer ready flag
            write_uint64(self.base_address, self.sys_values_offset + 0, self._size)
            write_uint64(self.base_address, self.sys_values_offset + 8, sys_arr_length * 8)
            write_uint64(self.base_address, self.sys_values_offset + 16, self._size - arr_byte_size)
            write_uint64(self.base_address, self.sys_values_offset + 24, self._size)
            write_uint64(self.base_address, self.sys_values_offset + 32, sys_arr_length * 8)
            write_uint64(self.base_address, self.sys_values_offset + 40, 0)
            write_uint64(self.base_address, self.sys_values_offset + 48, 0)
            write_uint64(self.base_address, self.sys_values_offset + 56, 0)
            write_uint64(self.base_address, self.sys_values_offset + 64, 0)
            write_uint64(self.base_address, self.sys_values_offset + 72, 0)
            write_uint64(self.base_address, self.sys_values_offset + 80, 0)
            write_uint64(self.base_address, self.sys_values_offset + 88, 0)
            write_uint64(self.base_address, self.sys_values_offset + 96, 0)
            # print(bytes(self._shared_memory.buf[0:120]))

            self.free_memory_search_start = self.read_free_memory_search_start()
            data_size: int = self.get_data_size()
            if self._zero_mem:
                zero_memory(self.base_address, self.free_memory_search_start, data_size)
            
            # First free-block header: (0, data_size - 16) — presumably "next"
            # and "usable size"; exact semantics defined by the allocator. TODO confirm.
            write_uint64(self.base_address, self.free_memory_search_start + 0, 0)
            write_uint64(self.base_address, self.free_memory_search_start + 8, data_size - 16)

            self.set_creator_ready()

            # print(bytes(self._shared_memory.buf[0:120]))
            self.get_data_end_offset()
            if self._create:
                self._initiated = True
            
        full_memory_barrier()
5669    
    async def _default_asleep_func(self):
        """Default cooperative yield used by the async wait loops."""
        await asyncio.sleep(0)
5672    
    @property
    def size(self) -> int:
        """Total size of the shared-memory block in bytes (may be None on a
        consumer before ``init_consumer`` resolves it)."""
        return self._size

    @property
    def name(self) -> str:
        """System-wide name of the shared-memory block."""
        return self._name
    
    @property
    def create(self) -> bool:
        """True when this instance is the creator of the block."""
        return self._create
5684    
    def _init_post_mem(self):
        """Compute the process-local base address of the mapped buffer.

        Must be re-run every time ``self._shared_memory`` is (re)created, since
        the mapping address changes with each attachment.
        """
        # ctypes.addressof of the first byte of the buffer yields the absolute
        # address used by the raw read_/write_ helper functions.
        self.base_address = ctypes.addressof(ctypes.c_char.from_buffer(self._shared_memory.buf))
        self.sys_values_offset = 0
        # if create:
        #     print(f'Creator: {self.base_address=}')
        # else:
        #     print(f'Consumer: {self.base_address=}')

        # self._shared_memory_bytearray = bytearray(self._shared_memory.buf)

        # self.sys_arr = np.ndarray((self.global_sys_array_len,), dtype=np.uint64, buffer=self._shared_memory.buf)
        # if DEBUG:
        #     self.log_arr = np.ndarray((500,), dtype=np.uint64, buffer=self._shared_memory.buf)
        # else:
        #     self.log_arr = self.sys_arr
5700    
5701    def init_consumer(self, time_limit: Optional[RationalNumber] = None) -> bool:
5702        if self._initiated:
5703            return
5704
5705        if not self.wait_shared_memory_ready(time_limit):
5706            return False
5707        
5708        if (self._size is None) or (0 == self._size):
5709            size: int = self.global_sys_area_size
5710        else:
5711            size = self._size
5712
5713        self._shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=self._name, create=self._create, size=size)
5714        self._init_post_mem()
5715        self.wait_creator_ready()
5716        
5717        if self._size is None:
5718            self._size = read_uint64(self.base_address, self.sys_values_offset + 0)
5719            self._shared_memory.close()
5720            self._shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=self._name, create=self._create, size=self._size)
5721        
5722        self._init_post_mem()
5723        self.free_memory_search_start = self.read_free_memory_search_start()
5724        
5725        self.set_consumer_ready()
5726
5727        # print(bytes(self._shared_memory.buf[0:120]))
5728        self.get_data_end_offset()
5729        self._initiated = True
5730        full_memory_barrier()
5731    
5732    async def ainit_consumer(self, time_limit: Optional[RationalNumber] = None) -> bool:
5733        if self._initiated:
5734            return
5735
5736        if not await self.await_shared_memory_ready(time_limit):
5737            return False
5738        
5739        if (self._size is None) or (0 == self._size):
5740            size: int = self.global_sys_area_size
5741        else:
5742            size = self._size
5743
5744        self._shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=self._name, create=self._create, size=size)
5745        self._init_post_mem()
5746        await self.await_creator_ready(time_limit)
5747        
5748        if self._size is None:
5749            self._size = read_uint64(self.base_address, self.sys_values_offset + 0)
5750            self._shared_memory.close()
5751            self._shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=self._name, create=self._create, size=self._size)
5752        
5753        self._init_post_mem()
5754        self.free_memory_search_start = self.read_free_memory_search_start()
5755        
5756        self.set_consumer_ready()
5757
5758        # print(bytes(self._shared_memory.buf[0:120]))
5759        self.get_data_end_offset()
5760        self._initiated = True
5761        full_memory_barrier()
5762    
    def close_consumer(self):
        """Announce that this consumer is detaching: clears the consumer-ready
        flag so the creator's ``wait_consumer_closed`` can proceed."""
        self.set_consumer_closed()
        full_memory_barrier()
    
    def __enter__(self):
        """Context-manager entry: no extra setup, returns self."""
        return self
    
    def __exit__(self, exc_type, exc_value, traceback):
        """Context-manager exit: cooperative shutdown via ``proper_close``."""
        self.proper_close()
    
    async def __aenter__(self):
        """Async context-manager entry: no extra setup, returns self."""
        return self
    
    async def __aexit__(self, exc_type, exc_value, traceback):
        """Async context-manager exit: cooperative shutdown via ``aproper_close``."""
        await self.aproper_close()
5778    
    def close(self):
        """Detach from the shared-memory block; the creator additionally
        unlinks the underlying segment."""
        self._shared_memory.close()
        if self._create:
            self._shared_memory.unlink()
            SharedMemory.unlink_by_name(self._name)
        else:
            if 'posix' == os.name:
                try:
                    # NOTE(review): private API — unregisters this consumer's
                    # attachment from the resource tracker, presumably so the
                    # tracker does not unlink a segment still owned by the
                    # creator; confirm against the Python version in use.
                    from multiprocessing import resource_tracker
                    shm_name = f'/{self._name}'
                    resource_tracker.unregister(shm_name, "shared_memory")
                except FileNotFoundError:
                    pass
5792
5793    def proper_close(self):
5794        if self._create:
5795            self.wait_consumer_closed(self._creator_destroy_timeout)
5796        else:
5797            self.close_consumer()
5798        
5799        self.close()
5800
5801    async def aproper_close(self):
5802        if self._create:
5803            await self.await_consumer_closed(self._creator_destroy_timeout)
5804        else:
5805            self.close_consumer()
5806        
5807        self.close()
5808
    @staticmethod
    def unlink_by_name(shared_memory_name: str):
        """Best-effort removal of a stale POSIX shared-memory segment.

        `multiprocessing.SharedMemory` requires this cleanup in order to handle
        the case when the previous run of the program was terminated
        unexpectedly. A missing segment is silently ignored; no-op on
        non-POSIX platforms.

        Args:
            shared_memory_name (str): segment name (without the leading '/').
        """        
        if 'posix' == os.name:
            try:
                import _posixshmem
                from multiprocessing import resource_tracker
                shm_name = f'/{shared_memory_name}'
                _posixshmem.shm_unlink(shm_name)
                # Drop the tracker registration too, so it does not complain later
                resource_tracker.unregister(shm_name, "shared_memory")
            except FileNotFoundError:
                pass
5826    
    @property
    def buf(self):
        """A memoryview of the contents of the shared memory block.

        Returns:
            memoryview: the full mapped buffer.
        """        
        return self._shared_memory.buf
5835    
5836    def mem_view(self, offset: Offset, size: Size) -> memoryview:
5837        return self._shared_memory.buf[offset:offset + size]
5838    
5839    def read_mem(self, offset: Offset, size: Size) -> List[int]:
5840        result = list()
5841        for i in range(size):
5842            result.append(read_uint8(self.base_address, offset + i))
5843        
5844        return result
5845    
5846    def print_mem(self, offset: Offset, size: Size, text: str = None):
5847        result = list()
5848        for i in range(size):
5849            result.append(read_uint8(self.base_address, offset + i))
5850        
5851        if text:
5852            print(f'{text.format(offset)}: {result}')
5853        else:
5854            print(f'{result}')
5855    
    def set_creator_ready(self):
        # Sys slot +88: creator-ready flag (polled by wait_creator_ready)
        write_uint64(self.base_address, self.sys_values_offset + 88, 1)
    
    def set_consumer_ready(self):
        # Sys slot +96: consumer-ready flag (polled by wait_consumer_ready)
        write_uint64(self.base_address, self.sys_values_offset + 96, 1)
    
    def set_consumer_closed(self):
        # Clearing the consumer-ready flag signals that the consumer detached
        write_uint64(self.base_address, self.sys_values_offset + 96, 0)
    
    def get_creator_ready(self):
        # Raw uint64 (0/1): creator-ready flag
        return read_uint64(self.base_address, self.sys_values_offset + 88)
    
    def get_consumer_ready(self):
        # Raw uint64 (0/1): consumer-ready flag
        return read_uint64(self.base_address, self.sys_values_offset + 96)
5870
    def wait_shared_memory_ready(self, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001) -> bool:
        """Poll until a shared-memory block with our name can be attached.

        Returns True as soon as the block exists (the probe attachment is closed
        immediately), False if `time_limit` expires first.
        """
        start_time = cpu_clock()
        shared_memory: MultiprocessingSharedMemory = None
        while True:
            try:
                # Probe attach: succeeds only once the creator made the block
                shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=self._name, create=False)
            except FileNotFoundError as ex:
                if time_limit is not None:
                    if (cpu_clock() - start_time) > time_limit:
                        return False
                
                if periodic_sleep_time is None:
                    continue
                else:
                    sleep(periodic_sleep_time)
            finally:
                # On a successful probe this `finally` closes it and returns
                # True; after a failed attempt `shared_memory` is still None,
                # so the `continue`/fall-through above passes through harmlessly.
                if shared_memory is not None:
                    shared_memory.close()
                    return True
        
        return False  # unreachable: the loop only exits via the returns above
5892
    async def await_shared_memory_ready(self, time_limit: Optional[RationalNumber] = None) -> bool:
        """Async variant of ``wait_shared_memory_ready``: yields via
        ``self._asleep_func`` between probe attempts."""
        start_time = cpu_clock()
        shared_memory: MultiprocessingSharedMemory = None
        while True:
            try:
                # Probe attach: succeeds only once the creator made the block
                shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=self._name, create=False)
            except FileNotFoundError as ex:
                if time_limit is not None:
                    if (cpu_clock() - start_time) > time_limit:
                        return False
                
                await self._asleep_func()
            finally:
                # On success: close the probe and report readiness
                if shared_memory is not None:
                    shared_memory.close()
                    return True
        
        return False  # unreachable: the loop only exits via the returns above
5911    
5912    def wait_creator_ready(self, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001) -> bool:
5913        if self._create:
5914            return
5915        
5916        start_time = cpu_clock()
5917        full_memory_barrier()
5918        while not read_uint64(self.base_address, self.sys_values_offset + 88):
5919            if time_limit is not None:
5920                if (cpu_clock() - start_time) > time_limit:
5921                    return False
5922            
5923            if periodic_sleep_time is None:
5924                mm_pause()
5925            else:
5926                hps_sleep(periodic_sleep_time)
5927            
5928            full_memory_barrier()
5929    
5930    async def await_creator_ready(self, time_limit: Optional[RationalNumber] = None) -> bool:
5931        if self._create:
5932            return
5933        
5934        start_time = cpu_clock()
5935        full_memory_barrier()
5936        while not read_uint64(self.base_address, self.sys_values_offset + 88):
5937            if time_limit is not None:
5938                if (cpu_clock() - start_time) > time_limit:
5939                    return False
5940            
5941            await self._asleep_func()
5942            
5943            full_memory_barrier()
5944    
5945    def wait_consumer_ready(self, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001) -> bool:
5946        if not self._create:
5947            return
5948        
5949        start_time = cpu_clock()
5950        full_memory_barrier()
5951        while not read_uint64(self.base_address, self.sys_values_offset + 96):
5952            if time_limit is not None:
5953                if (cpu_clock() - start_time) > time_limit:
5954                    return False
5955            
5956            if periodic_sleep_time is None:
5957                mm_pause()
5958            else:
5959                hps_sleep(periodic_sleep_time)
5960            
5961            full_memory_barrier()
5962    
5963    async def await_consumer_ready(self, time_limit: Optional[RationalNumber] = None) -> bool:
5964        if not self._create:
5965            return
5966        
5967        start_time = cpu_clock()
5968        full_memory_barrier()
5969        while not read_uint64(self.base_address, self.sys_values_offset + 96):
5970            if time_limit is not None:
5971                if (cpu_clock() - start_time) > time_limit:
5972                    return False
5973            
5974            await self._asleep_func()
5975            
5976            full_memory_barrier()
5977    
5978    def wait_consumer_closed(self, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001) -> bool:
5979        if not self._create:
5980            return
5981        
5982        start_time = cpu_clock()
5983        full_memory_barrier()
5984        while read_uint64(self.base_address, self.sys_values_offset + 96):
5985            if time_limit is not None:
5986                if (cpu_clock() - start_time) > time_limit:
5987                    return False
5988            
5989            if periodic_sleep_time is None:
5990                mm_pause()
5991            else:
5992                hps_sleep(periodic_sleep_time)
5993            
5994            full_memory_barrier()
5995    
5996    async def await_consumer_closed(self, time_limit: Optional[RationalNumber] = None) -> bool:
5997        if not self._create:
5998            return
5999        
6000        start_time = cpu_clock()
6001        full_memory_barrier()
6002        while read_uint64(self.base_address, self.sys_values_offset + 96):
6003            if time_limit is not None:
6004                if (cpu_clock() - start_time) > time_limit:
6005                    return False
6006            
6007            await self._asleep_func()
6008            
6009            full_memory_barrier()
6010    
6011    def creator_in_charge(self) -> bool:
6012        return read_uint64(self.base_address, self.sys_values_offset + 56)
6013    
6014    def consumer_in_charge(self) -> bool:
6015        return read_uint64(self.base_address, self.sys_values_offset + 64)
6016    
6017    def creator_wants_to_be_in_charge(self) -> bool:
6018        return read_uint64(self.base_address, self.sys_values_offset + 72)
6019    
6020    def consumer_wants_to_be_in_charge(self) -> bool:
6021        return read_uint64(self.base_address, self.sys_values_offset + 80)
6022    
    def read_free_memory_search_start(self) -> int:
        """Read the persisted free-memory search start (sys slot +32)."""
        # return self.get_data_start_offset()
        return read_uint64(self.base_address, self.sys_values_offset + 32)
    
    def update_free_memory_search_start(self) -> None:
        """Refresh the cached search start from shared memory.
        (Annotation fix: returns None, was declared -> int.)"""
        self.free_memory_search_start = self.read_free_memory_search_start()
    
    def get_free_memory_search_start(self) -> int:
        """Return the cached search start (not re-read from shared memory)."""
        # self.update_free_memory_search_start()
        return self.free_memory_search_start
    
    def write_free_memory_search_start(self, offset: Offset) -> None:
        """Persist `offset` as the search start, clamping it back to the data
        start when it falls outside the valid data range.
        (Annotation fix: returns None, was declared -> int.)"""
        # return
        if ((self.get_data_end_offset() - 16) < offset) or (offset < self.get_data_start_offset()):
            offset = self.get_data_start_offset()
        
        write_uint64(self.base_address, self.sys_values_offset + 32, offset)
    
    def commit_free_memory_search_start(self):
        """Persist the cached search start to shared memory."""
        self.write_free_memory_search_start(self.free_memory_search_start)
    
    def set_free_memory_search_start(self, offset: Offset) -> None:
        """Update only the cached search start (no shared-memory write), with
        the same range clamping as ``write_free_memory_search_start``.
        (Annotation fix: returns None, was declared -> int.)"""
        # return
        if ((self.get_data_end_offset() - 16) < offset) or (offset < self.get_data_start_offset()):
            offset = self.get_data_start_offset()
        
        self.free_memory_search_start = offset
        # self.commit_free_memory_search_start()
6051    
    def get_last_message_offset(self) -> Optional[Offset]:
        # Sys slot +48: offset of the queue tail message.
        # NOTE(review): returns the raw uint64 (0 when unset — the creator
        # initializes it to 0), never None, despite the Optional annotation —
        # confirm callers treat 0 as "no message".
        return read_uint64(self.base_address, self.sys_values_offset + 48)

    def set_last_message_offset(self, offset: Offset):
        # Sys slot +48: offset of the queue tail message
        write_uint64(self.base_address, self.sys_values_offset + 48, offset)
    
    def get_first_message_offset(self) -> Optional[Offset]:
        # Sys slot +40: offset of the queue head message (same 0-vs-None
        # caveat as get_last_message_offset)
        return read_uint64(self.base_address, self.sys_values_offset + 40)

    def set_first_message_offset(self, offset: Offset):
        # Sys slot +40: offset of the queue head message
        write_uint64(self.base_address, self.sys_values_offset + 40, offset)
    
    def get_data_start_offset(self) -> Offset:
        # Sys slot +8: first byte of the data area (right after the sys header)
        return read_uint64(self.base_address, self.sys_values_offset + 8)

    def get_data_size(self) -> Size:
        # Sys slot +16: size of the data area in bytes
        return read_uint64(self.base_address, self.sys_values_offset + 16)
    
    def get_data_end_offset(self) -> Offset:
        # Sys slot +24: offset just past the data area (== total block size,
        # as written by __init__)
        result = read_uint64(self.base_address, self.sys_values_offset + 24)
        # Debug consistency check: the stored end offset should equal the
        # mapped buffer length; a mismatch is printed, not raised.
        if result != len(self._shared_memory.buf):
            print(result, len(self._shared_memory.buf))
        
        return result
6076
6077    # def read_uint64(self, offset: Offset) -> int:
6078    #     return read_uint64(self.base_address, offset)
6079    
6080    # def write_uint64(self, offset: Offset, value: int):
6081    #     write_uint64(self.base_address, offset, value)
6082    
6083    def read_uint64(self, offset: Offset) -> int:
6084        return int.from_bytes(self._shared_memory.buf[offset:offset + 8], byteorder='little', signed=False)
6085    
6086    def write_uint64(self, offset: Offset, value: int):
6087        self._shared_memory.buf[offset:offset + 8] = value.to_bytes(8, byteorder='little', signed=False)
6088    
6089    # def read_uint32(self, offset: Offset) -> int:
6090    #     return int.from_bytes(self._shared_memory.buf[offset:offset + 4], byteorder='little', signed=False)
6091    
6092    # def write_uint32(self, offset: Offset, value: int):
6093    #     self._shared_memory.buf[offset:offset + 4] = value.to_bytes(4, byteorder='little', signed=False)
6094    
6095    # def read_uint16(self, offset: Offset) -> int:
6096    #     return int.from_bytes(self._shared_memory.buf[offset:offset + 2], byteorder='little', signed=False)
6097    
6098    # def write_uint16(self, offset: Offset, value: int):
6099    #     self._shared_memory.buf[offset:offset + 2] = value.to_bytes(2, byteorder='little', signed=False)
6100    
6101    # def read_uint8(self, offset: Offset) -> int:
6102    #     return int.from_bytes(self._shared_memory.buf[offset:offset + 1], byteorder='little', signed=False)
6103    
6104    # def write_uint8(self, offset: Offset, value: int):
6105    #     self._shared_memory.buf[offset:offset + 1] = value.to_bytes(1, byteorder='little', signed=False)
6106    
6107    # def read_int64(self, offset: Offset) -> int:
6108    #     return int.from_bytes(self._shared_memory.buf[offset:offset + 8], byteorder='little', signed=True)
6109    
6110    # def write_int64(self, offset: Offset, value: int):
6111    #     self._shared_memory.buf[offset:offset + 8] = value.to_bytes(8, byteorder='little', signed=True)
6112    
6113    # def read_int32(self, offset: Offset) -> int:
6114    #     return int.from_bytes(self._shared_memory.buf[offset:offset + 4], byteorder='little', signed=True)
6115    
6116    # def write_int32(self, offset: Offset, value: int):
6117    #     self._shared_memory.buf[offset:offset + 4] = value.to_bytes(4, byteorder='little', signed=True)
6118    
6119    # def read_int16(self, offset: Offset) -> int:
6120    #     return int.from_bytes(self._shared_memory.buf[offset:offset + 2], byteorder='little', signed=True)
6121    
6122    # def write_int16(self, offset: Offset, value: int):
6123    #     self._shared_memory.buf[offset:offset + 2] = value.to_bytes(2, byteorder='little', signed=True)
6124
6125    # def read_int8(self, offset: Offset) -> int:
6126    #     return int.from_bytes(self._shared_memory.buf[offset:offset + 1], byteorder='little', signed=True)
6127    
6128    # def write_int8(self, offset: Offset, value: int):
6129    #     self._shared_memory.buf[offset:offset + 1] = value.to_bytes(1, byteorder='little', signed=True)
6130
6131    # def read_float(self, offset: Offset) -> float:
6132    #     return float.from_bytes(self._shared_memory.buf[offset:offset + 4], byteorder='little', signed=False)
6133    
6134    # def write_float(self, offset: Offset, value: float):
6135    #     self._shared_memory.buf[offset:offset + 4] = value.to_bytes(4, byteorder='little', signed=False)
6136
6137    # def read_double(self, offset: Offset) -> float:
6138    #     return float.from_bytes(self._shared_memory.buf[offset:offset + 8], byteorder='little', signed=False)
6139    
6140    # def write_double(self, offset: Offset, value: float):
6141    #     self._shared_memory.buf[offset:offset + 8] = value.to_bytes(8, byteorder='little', signed=False)
6142    
6143    # def read_complex(self, offset: Offset) -> complex:
6144    #     return complex.from_bytes(self._shared_memory.buf[offset:offset + 16], byteorder='little', signed=False)
6145    
6146    # def write_complex(self, offset: Offset, value: complex):
6147    #     self._shared_memory.buf[offset:offset + 16] = value.to_bytes(16, byteorder='little', signed=False)
6148    
6149    # def read_bool(self, offset: Offset) -> bool:
6150    #     return bool.from_bytes(self._shared_memory.buf[offset:offset + 1], byteorder='little', signed=False)
6151    
6152    # def write_bool(self, offset: Offset, value: bool):
6153    #     self._shared_memory.buf[offset:offset + 1] = value.to_bytes(1, byteorder='little', signed=False)
6154    
6155    # def read_str(self, offset: Offset) -> str:
6156    #     size = read_uint64(self.base_address, offset)
6157    #     return self._shared_memory.buf[offset + 8:offset + 8 + size].decode()
6158    
6159    # def read_str_2(self, offset: Offset, size: Size) -> str:
6160    #     return self._shared_memory.buf[offset + 8:offset + 8 + size].decode()
6161    
6162    # def write_str(self, offset: Offset, value: str):
6163    #     size = len(value)
6164    #     write_uint64(self.base_address, offset, size)
6165    #     self._shared_memory.buf[offset + 8:offset + 8 + size] = value.encode()
6166    
6167    # def read_bytes(self, offset: Offset) -> bytes:
6168    #     size = read_uint64(self.base_address, offset)
6169    #     return self._shared_memory.buf[offset + 8:offset + 8 + size]
6170
6171    # def read_bytes_2(self, offset: Offset, size: Size) -> bytes:
6172    #     return self._shared_memory.buf[offset + 8:offset + 8 + size]
6173    
6174    # def write_bytes(self, offset: Offset, value: bytes):
6175    #     size = len(value)
6176    #     write_uint64(self.base_address, offset, size)
6177    #     self._shared_memory.buf[offset + 8:offset + 8 + size] = value
6178    
6179    # def read_bytearray(self, offset: Offset) -> bytearray:
6180    #     size = read_uint64(self.base_address, offset)
6181    #     return bytearray(self._shared_memory.buf[offset + 8:offset + 8 + size])
6182    
6183    # def read_bytearray_2(self, offset: Offset, size: Size) -> bytearray:
6184    #     return bytearray(self._shared_memory.buf[offset + 8:offset + 8 + size])
6185    
6186    # def write_bytearray(self, offset: Offset, value: bytearray):
6187    #     size = len(value)
6188    #     write_uint64(self.base_address, offset, size)
6189    #     self._shared_memory.buf[offset + 8:offset + 8 + size] = value
6190    
6191    # def read_tuple(self, offset: Offset) -> tuple:
6192    #     size = read_uint64(self.base_address, offset)
6193    #     return tuple(self._shared_memory.buf[offset + 8:offset + 8 + size])
6194    
6195    # def write_tuple(self, offset: Offset, value: tuple):
6196    #     size = len(value)
6197    #     write_uint64(self.base_address, offset, size)
6198    #     self._shared_memory.buf[offset + 8:offset + 8 + size] = value
6199    
6200    # def read_list(self, offset: Offset) -> list:
6201    #     size = read_uint64(self.base_address, offset)
6202    #     return list(self._shared_memory.buf[offset + 8:offset + 8 + size])
6203    
6204    # def write_list(self, offset: Offset, value: list):
6205    #     size = len(value)
6206    #     write_uint64(self.base_address, offset, size)
6207    #     self._shared_memory.buf[offset + 8:offset + 8 + size] = value
6208
6209    # def read_dict(self, offset: Offset) -> dict:
6210    #     size = read_uint64(self.base_address, offset)
6211    #     return dict(self._shared_memory.buf[offset + 8:offset + 8 + size])
6212    
6213    # def write_dict(self, offset: Offset, value: dict):
6214    #     size = len(value)
6215    #     write_uint64(self.base_address, offset, size)
6216    #     self._shared_memory.buf[offset + 8:offset + 8 + size] = value
6217    
6218    # def read_set(self, offset: Offset) -> set:
6219    #     size = read_uint64(self.base_address, offset)
6220    #     return set(self._shared_memory.buf[offset + 8:offset + 8 + size])
6221    
6222    # def write_set(self, offset: Offset, value: set):
6223    #     size = len(value)
6224    #     write_uint64(self.base_address, offset, size)
6225    #     self._shared_memory.buf[offset + 8:offset + 8 + size] = value
6226    
6227    # def read_pickable(self, offset: Offset) -> Any:
6228    #     size = read_uint64(self.base_address, offset)
6229    #     return pickle.loads(self._shared_memory.buf[offset + 8:offset + 8 + size])
6230    
6231    # def write_pickable(self, offset: Offset, value: Any):
6232    #     value_bytes = pickle.dumps(value)
6233    #     size = len(value_bytes)
6234    #     write_uint64(self.base_address, offset, size)
6235    #     self._shared_memory.buf[offset + 8:offset + 8 + size] = value_bytes
6236
6237    # ----------------------------
6238    
6239    def read_obj_type_and_size(self, offset: Offset) -> Tuple[ObjectType, Size]:
6240        obj_type = ObjectType(read_uint64(self.base_address, offset + 0))
6241        size = read_uint64(self.base_address, offset + 8)
6242        return obj_type, size
6243    
6244    def write_obj_type_and_size(self, offset: Offset, obj_type: ObjectType, size: Size):
6245        write_uint64(self.base_address, offset + 0, obj_type.value)
6246        write_uint64(self.base_address, offset + 8, size)
6247        return offset + 16
6248
6249    # ----------------------------
6250    
    def test_free_memory_blocks(self, offset: Offset, desired_size: Size, data_end_offset: Offset) -> Tuple[bool, Size, Optional[Offset], Optional[Offset], Offset]:
        """Check whether consecutive free blocks starting at ``offset`` can hold ``desired_size`` bytes.

        Every block begins with a 16-byte header (uint64 type tag + uint64
        payload size).  The scan walks forward, accumulating adjacent
        ``tfree_memory`` blocks until the span covers ``desired_size`` or an
        occupied block / the end of the data area is reached.

        Returns a 5-tuple::

            (found, adjusted_size, new_free_block_offset, new_free_block_size,
             next_block_offset)

        ``adjusted_size`` may exceed ``desired_size`` when splitting the last
        free block would leave a remainder smaller than one header (16 bytes);
        ``new_free_block_offset``/``new_free_block_size`` describe the
        remainder block to re-mark as free (or ``None`` if there is none).
        """
        adjusted_size = desired_size
        initial_offset = offset
        sum_size = 0
        # Last offset at which a full 16-byte header can still fit.
        max_viable_offset = data_end_offset - 16
        last_found_obj_offset = None
        last_found_obj_size = None
        while True:
            last_found_obj_offset = offset
            try:
                obj_type = ObjectType(read_uint64(self.base_address, offset))
            except ValueError:
                # NOTE(review): only logs — ``obj_type`` stays unbound here, so
                # the ``obj_type is not ObjectType.tfree_memory`` test below
                # would raise NameError; confirm this path is unreachable.
                print(f'Error: {offset=}, {desired_size=}, {sum_size=}')
            
            size = read_uint64(self.base_address, offset + 8)
            if size % 8:
                # Sizes are expected to be 8-byte aligned; dump nearby memory
                # for diagnosis before failing hard.
                print(f'WRONG SIZE {obj_type=} {size=} {offset=} {desired_size=} {data_end_offset=}')
                self.print_mem(offset - 8 * 10, 8 * 10, 'WRONG SIZE - before')
                self.print_mem(offset, 8 * 10, 'WRONG SIZE - after')
                raise RuntimeError(f'WRONG SIZE: {size=}, {offset=}, {obj_type=}')
            
            last_found_obj_size = 16 + size  # header + payload
            next_block_offset = last_found_obj_offset + last_found_obj_size
            if next_block_offset > data_end_offset:
                # Block claims to extend past the data area: treat as not found.
                print(f'{next_block_offset=}, {data_end_offset=}, {len(self._shared_memory.buf)=}')
                return False, adjusted_size, None, None, next_block_offset

            if obj_type is not ObjectType.tfree_memory:
                # Hit an occupied block before accumulating enough space.
                return False, adjusted_size, None, None, next_block_offset

            sum_size = next_block_offset - initial_offset

            if sum_size == desired_size:
                # Exact fit: no remainder block needed.
                return True, adjusted_size, None, None, next_block_offset

            if sum_size > desired_size:
                # The run overshoots: try to split the last free block.
                new_next_block_offset = initial_offset + desired_size
                new_next_block_size = last_found_obj_size - (new_next_block_offset - last_found_obj_offset)
                if new_next_block_size < 16:
                    # Remainder too small to carry a header: absorb it into the
                    # allocation instead of creating an unusable sliver.
                    adjusted_size = desired_size + new_next_block_size
                    return True, adjusted_size, None, None, next_block_offset
                else:
                    return True, adjusted_size, new_next_block_offset, new_next_block_size, new_next_block_offset

            offset = last_found_obj_offset + last_found_obj_size
            if offset > max_viable_offset:
                # No room left even for another header.
                return False, adjusted_size, None, None, next_block_offset
6298
    def combine_free_memory_blocks(self, free_mem_block_offset: Offset, size: Size, last_free_block_offset: Offset, last_free_block_new_size: Size, next_block_offset: Offset, mark_block: bool = False) -> None:
        """Commit the layout computed by ``test_free_memory_blocks``.

        When ``mark_block`` is set, re-marks the merged span at
        ``free_mem_block_offset`` as a single free block of ``size`` bytes
        (header included).  When a split produced a remainder block
        (``last_free_block_offset`` is not None), writes its free-memory
        header.  ``next_block_offset`` is currently unused (see the
        commented-out line at the bottom).
        """
        if mark_block:
            # ``size`` includes the 16-byte header; the stored payload size does not.
            self.write_obj_type_and_size(free_mem_block_offset, ObjectType.tfree_memory, size - 16)
        
        if last_free_block_offset is not None:
            if last_free_block_new_size - 16 < 0:
                # Should not happen: the split logic guarantees >= 16.
                print(f'Error: {last_free_block_new_size=}')
            
            self.write_obj_type_and_size(last_free_block_offset, ObjectType.tfree_memory, last_free_block_new_size - 16)
        
        # self.set_free_memory_search_start(next_block_offset)
6310
6311    # ----------------------------
6312    
    def malloc(self, obj_type: ObjectType, size: Size, loop_allowed: bool = True, zero_mem: bool = False) -> Tuple[Optional[Offset], Size]:
        """Allocate a block able to hold ``size`` payload bytes of ``obj_type``.

        Scans forward from the saved free-memory search start; when nothing
        fits and ``loop_allowed`` is set, wraps around and rescans from the
        data-area start up to the original starting point.

        Returns:
            (block_offset, payload_size) — ``block_offset`` points at the
            16-byte header; usable payload starts at ``block_offset + 16``.

        Raises:
            FreeMemoryChunkNotFoundError: when no suitable run of free blocks
                exists.
        """
        start_time = cpu_clock()
        try:
            # Account for the 16-byte header and round up to the allocation
            # granularity.
            size += 16
            size = nearest_size(size)
            adjusted_size = size
            initial_start_offset = self.get_free_memory_search_start()
            data_end_offset: Offset = self.get_data_end_offset()
            search_end_offset = data_end_offset - 16
            start_offset = initial_start_offset
            free_mem_block_offset: Offset = None
            last_free_block_offset: Offset = None
            last_free_block_new_size: Size = None
            found: bool = False
            sum_size: Size = 0  # NOTE(review): never used below
            while (not found) and (start_offset <= search_end_offset):
                free_mem_block_offset = start_offset
                found, adjusted_size, last_free_block_offset, last_free_block_new_size, next_block_offset = self.test_free_memory_blocks(start_offset, size, data_end_offset)
                start_offset = next_block_offset
            
            if (not found) and loop_allowed:
                # Wrap-around pass: rescan from the beginning of the data area
                # up to where the first pass started.
                start_offset = self.get_data_start_offset()
                search_end_offset = initial_start_offset - 16
                while (not found) and (start_offset <= search_end_offset):
                    free_mem_block_offset = start_offset
                    found, adjusted_size, last_free_block_offset, last_free_block_new_size, next_block_offset = self.test_free_memory_blocks(start_offset, size, data_end_offset)
                    start_offset = next_block_offset

            if not found:
                raise FreeMemoryChunkNotFoundError(obj_type, size, loop_allowed, zero_mem)
            
            # Write the remainder free-block header produced by a split, if any.
            self.combine_free_memory_blocks(free_mem_block_offset, adjusted_size, last_free_block_offset, last_free_block_new_size, next_block_offset)
            obj_size = adjusted_size - 16
            self.write_obj_type_and_size(free_mem_block_offset, obj_type, obj_size)
            if zero_mem:
                # print(f'Zeroing memory 1: {free_mem_block_offset=}, {result_size=}')
                # hps_sleep(0.01)
                zero_memory(self.base_address, free_mem_block_offset + 16, obj_size)

            if free_mem_block_offset % 8:
                # Allocations are expected to stay 8-byte aligned.
                print(f'Error: {free_mem_block_offset=}, {obj_size=}')
                
        
            self.set_free_memory_search_start(free_mem_block_offset)
            return free_mem_block_offset, obj_size
        finally:
            self._malloc_time += cpu_clock() - start_time
6360    
6361    # def zero_memory(self, offset: Offset, size: Size):
6362    #     # print(f'Zeroing memory 1: [{self.base_address + offset}:{self.base_address + offset + size}], {size=}')
6363    #     self._shared_memory_bytearray[offset:offset + size] = bytearray(size)
6364    
6365    def calloc(self, obj_type: ObjectType, size: Size, num: int, loop_allowed: bool = True, zero_mem: bool = True) -> Tuple[Optional[Offset], Size]:
6366        return self.malloc(obj_type, size * num, loop_allowed, zero_mem)
6367    
    def realloc(self, obj_offset: Offset, new_size: int, loop_allowed: bool = True, zero_mem: bool = True) -> Tuple[Optional[Offset], Size]:
        """Grow the object at ``obj_offset`` to at least ``new_size`` payload bytes.

        First attempts in-place growth by absorbing the free blocks directly
        after the object; otherwise allocates a fresh block of the same type,
        copies the contents and frees the old block.

        Returns:
            (new_offset, payload_size), or (None, 0) when the fallback malloc
            yielded no offset.
        """
        start_time: float = cpu_clock()
        # Time spent inside the nested malloc is excluded from _realloc_time
        # because malloc already accounts for it in _malloc_time.
        internal_malloc_time: float = 0.0
        try:
            new_size += 16
            new_size = nearest_size(new_size)
            data_end_offset: Offset = self.get_data_end_offset()
            result_offset: Offset = None
            result_obj_size: Size = 0
            original_obj_size = read_uint64(self.base_address, obj_offset + 8)
            size = original_obj_size + 16  # full current block size incl. header
            next_obj_offset = obj_offset + size
            free_mem_block_offset = next_obj_offset
            dsize = new_size - size  # extra bytes needed right after the block
            found, additional_adjusted_size, last_free_block_offset, last_free_block_new_size, next_block_offset = self.test_free_memory_blocks(free_mem_block_offset, dsize, data_end_offset)
            if found:
                # In-place growth: absorb the free run following the object.
                self.combine_free_memory_blocks(free_mem_block_offset, additional_adjusted_size, last_free_block_offset, last_free_block_new_size, next_block_offset)
                if zero_mem:
                    # print(f'Zeroing memory 3: {free_mem_block_offset=}, {result_size=}')
                    # hps_sleep(0.01)
                    zero_memory(self.base_address, free_mem_block_offset, dsize)
                
                result_obj_size = new_size - 16
                write_uint64(self.base_address, obj_offset + 8, result_obj_size)
                self.set_free_memory_search_start(obj_offset)
                result_offset = obj_offset
            else:
                # Relocate: allocate a fresh block with the same type tag.
                internal_malloc_start_time: float = cpu_clock()
                new_offset, result_obj_size = self.malloc(ObjectType(read_uint64(self.base_address, obj_offset + 0)), new_size, loop_allowed)
                internal_malloc_time += cpu_clock() - internal_malloc_start_time
                if new_offset is None:
                    return None, 0

                # NOTE(review): copies ``size`` (= payload + 16) bytes, i.e. 16
                # bytes past the old payload; within bounds because the new
                # block is larger, but looks like it should copy
                # ``original_obj_size`` bytes — confirm.
                self._shared_memory.buf[new_offset + 16:new_offset + 16 + size] = self._shared_memory.buf[obj_offset + 16:obj_offset + 16 + size]
                if zero_mem:
                    # print(f'Zeroing memory 4: {new_offset=}, {new_size=}')
                    # hps_sleep(0.01)
                    zero_memory(self.base_address, new_offset + 16 + original_obj_size, result_obj_size - original_obj_size)
                
                self.free(obj_offset)
                result_offset = new_offset
            
            return result_offset, result_obj_size
        finally:
            self._realloc_time += cpu_clock() - start_time - internal_malloc_time
6413    
    def free(self, offset: Offset) -> bool:
        """Mark the block at ``offset`` as free memory.

        Overwrites the block's type tag with 0 (presumably the value of
        ``ObjectType.tfree_memory`` — TODO confirm); the size field is left
        intact so the allocator can reuse the block.  Always returns True.
        """
        write_uint64(self.base_address, offset, 0)
        return True
6417
6418    # ----------------------------
6419    
6420    def put_obj(self, obj: Any):
6421        obj_type = self._get_obj_type(obj)
6422        codec = codec_by_type[obj_type]
6423        mapped_obj, offset, size = codec.map_to_shared_memory(self, obj)
6424        return mapped_obj, offset, size
6425
6426    def get_obj(self, offset: int) -> Any:
6427        # print(f'get_obj: {offset=}')
6428        obj_type = ObjectType(read_uint64(self.base_address, offset))
6429        if obj_type is ObjectType.tfree_memory:
6430            # self.print_mem(offset - 32, 96, 'get_obj [offset - 32: offset + 64]. {}')
6431            raise RuntimeError
6432        
6433        codec = codec_by_type[obj_type]
6434        return codec.init_from_shared_memory(self, offset)
6435
6436    def get_obj_buffer(self, offset: int) -> memoryview:
6437        # print(f'get_obj: {offset=}')
6438        obj_type = ObjectType(read_uint64(self.base_address, offset))
6439        if obj_type is ObjectType.tfree_memory:
6440            # self.print_mem(offset - 32, 96, 'get_obj [offset - 32: offset + 64]. {}')
6441            raise RuntimeError
6442        
6443        codec = codec_by_type[obj_type]
6444        return codec.buffer(self, offset)
6445
6446    def get_obj_buffer_2(self, offset: int) -> Tuple[int, int]:
6447        # print(f'get_obj: {offset=}')
6448        obj_type = ObjectType(read_uint64(self.base_address, offset))
6449        if obj_type is ObjectType.tfree_memory:
6450            # self.print_mem(offset - 32, 96, 'get_obj [offset - 32: offset + 64]. {}')
6451            raise RuntimeError
6452        
6453        codec = codec_by_type[obj_type]
6454        return codec.buffer_2(self, offset)
6455
6456    def get_obj_mem_view(self, offset: int) -> memoryview:
6457        return self.mem_view(*self.get_obj_buffer_2(offset))
6458
6459    def destroy_obj(self, offset: int) -> Any:
6460        obj_type = ObjectType(read_uint64(self.base_address, offset))
6461        codec = codec_by_type[obj_type]
6462        return codec.destroy(self, offset)
6463
6464    # ----------------------------
6465
6466    def map_object(self, obj: Any) -> Any:
6467        # self.update_free_memory_search_start()
6468        mapped_obj, offset, size = self.put_obj(obj)
6469        # self.commit_free_memory_search_start()
6470        return mapped_obj
6471
6472    def get_object(self, offset: Offset) -> Any:
6473        return self.get_obj(offset)
6474
6475    def destroy_object(self, offset: Offset) -> Any:
6476        return self.destroy_obj(offset)
6477
6478    # ----------------------------
6479
    def write_message(self, obj: Any) -> Tuple[Any, Offset, Offset]:
        """Map ``obj`` into shared memory and append it to the message queue.

        A message record has a 24-byte payload: prev-message offset,
        next-message offset, and the offset of the mapped object.  The record
        is linked at the tail of the doubly-linked queue.

        Returns:
            (mapped_obj, obj_offset, message_offset)
        """
        # self.update_free_memory_search_start()
        message_offset, message_real_size = self.malloc(ObjectType.tmessage, 24)
        try:
            mapped_obj, offset, size = self.put_obj(obj)
            # self.commit_free_memory_search_start()
            last_message_offset: Offset = self.get_last_message_offset()
            if last_message_offset:
                # Point the previous tail's ``next`` field at the new message.
                write_uint64(self.base_address, last_message_offset + 16 + 8, message_offset)
            else:
                # Queue was empty: the new message is also the head.
                self.set_first_message_offset(message_offset)
            
            write_uint64(self.base_address, message_offset + 16 + 0, last_message_offset)  # prev
            write_uint64(self.base_address, message_offset + 16 + 8, 0)  # next
            write_uint64(self.base_address, message_offset + 16 + 16, offset)  # payload object
            self.set_last_message_offset(message_offset)
        except:
            # Roll back the message block on any failure and re-raise.
            # NOTE(review): bare except also catches KeyboardInterrupt/SystemExit,
            # but the re-raise keeps them propagating.
            self.free(message_offset)
            raise

        return mapped_obj, offset, message_offset
6501
6502    def put_message(self, obj: Any) -> Any:
6503        mapped_obj, offset, message_offset = self.write_message(obj)
6504        return mapped_obj
6505    
6506    def put_message_2(self, obj: Any) -> Tuple[Any, Offset]:
6507        mapped_obj, offset, message_offset = self.write_message(obj)
6508        return mapped_obj, offset
6509
6510    def has_messages(self) -> bool:
6511        return self.get_last_message_offset() != 0
6512
    def read_message_info(self, queue_type: QueueType = QueueType.fifo) -> Tuple[Any, Optional[Offset], Optional[Offset]]:
        """Unlink the next message (FIFO: head; otherwise: tail) and decode its payload.

        The message record itself is NOT freed here — callers release it with
        ``destroy_message``.

        Returns:
            (obj, obj_offset, message_offset); (None, None, None) when the
            queue is empty, and (None, None, message_offset) for a message
            without a payload object.
        """
        if QueueType.fifo == queue_type:
            message_offset = self.get_first_message_offset()
            if not message_offset:
                return None, None, None
            
            # Advance the head to the following message and clear its ``prev``
            # link; an empty queue also clears the tail pointer.
            next_message_offset = read_uint64(self.base_address, message_offset + 16 + 8)
            self.set_first_message_offset(next_message_offset)
            if next_message_offset:
                write_uint64(self.base_address, next_message_offset + 16 + 0, 0)
            else:
                self.set_last_message_offset(0)
        else:
            message_offset = self.get_last_message_offset()
            if not message_offset:
                return None, None, None
            
            # Retreat the tail to the preceding message and clear its ``next``
            # link; an empty queue also clears the head pointer.
            prev_message_offset = read_uint64(self.base_address, message_offset + 16 + 0)
            self.set_last_message_offset(prev_message_offset)
            if prev_message_offset:
                write_uint64(self.base_address, prev_message_offset + 16 + 8, 0)
            else:
                self.set_first_message_offset(0)
        
        obj_offset = read_uint64(self.base_address, message_offset + 16 + 16)
        if not obj_offset:
            return None, None, message_offset

        obj = self.get_obj(obj_offset)
        return obj, obj_offset, message_offset
6550
6551    def destroy_message(self, message_offset: Offset):
6552        if not message_offset:
6553            return
6554        
6555        # obj_offset = read_uint64(self.base_address, message_offset + 16 + 16)
6556        # if obj_offset:
6557        #     self.destroy_obj(obj_offset)
6558        
6559        # self.destroy_obj(message_offset)
6560
6561        self.free(message_offset)
6562    
6563    def read_message(self, queue_type: QueueType = QueueType.fifo) -> Any:
6564        obj, obj_offset, message_offset = self.read_message_info(queue_type)
6565        if message_offset:
6566            return obj
6567        else:
6568            raise NoMessagesInQueueError
6569    
6570    def read_message_2(self, queue_type: QueueType = QueueType.fifo) -> Tuple[Any, Offset]:
6571        obj, obj_offset, message_offset = self.read_message_info(queue_type)
6572        if message_offset:
6573            return obj, obj_offset
6574        else:
6575            raise NoMessagesInQueueError
6576
6577    def take_message(self, queue_type: QueueType = QueueType.fifo) -> Any:
6578        obj, obj_offset, message_offset = self.read_message_info(queue_type)
6579        if message_offset:
6580            self.destroy_message(message_offset)
6581        else:
6582            raise NoMessagesInQueueError
6583        
6584        return obj
6585
6586    def take_message_2(self, queue_type: QueueType = QueueType.fifo) -> Tuple[Any, Offset]:
6587        obj, obj_offset, message_offset = self.read_message_info(queue_type)
6588        if message_offset:
6589            self.destroy_message(message_offset)
6590        else:
6591            raise NoMessagesInQueueError
6592        
6593        return obj, obj_offset
6594    
6595    def get_message(self, default = None, queue_type: QueueType = QueueType.fifo) -> Any:
6596        obj, obj_offset, message_offset = self.read_message_info(queue_type)
6597        if message_offset:
6598            return obj
6599        else:
6600            return default
6601    
6602    def get_message_2(self, default = None, queue_type: QueueType = QueueType.fifo) -> Tuple[Any, Optional[Offset]]:
6603        obj, obj_offset, message_offset = self.read_message_info(queue_type)
6604        if message_offset:
6605            return obj, obj_offset
6606        else:
6607            return default, None
6608
6609    def pop_message(self, default = None, queue_type: QueueType = QueueType.fifo) -> Any:
6610        obj, obj_offset, message_offset = self.read_message_info(queue_type)
6611        if message_offset:
6612            self.destroy_message(message_offset)
6613        else:
6614            obj = default
6615        
6616        return obj
6617
6618    def pop_message_2(self, default = None, queue_type: QueueType = QueueType.fifo) -> Tuple[Any, Optional[Offset]]:
6619        obj, obj_offset, message_offset = self.read_message_info(queue_type)
6620        if message_offset:
6621            self.destroy_message(message_offset)
6622        else:
6623            obj = default
6624            obj_offset = None
6625        
6626        return obj, obj_offset
6627
6628    # ----------------------------
6629
    def get_in_line(self) -> bool:
        """Attempt (non-blocking) to acquire the cross-process turn.

        Dekker-like handshake over four uint64 flags in the sys-values area:
        the creator side uses offsets +56/+72 and the consumer side +64/+80 —
        presumably an "in charge" flag and a "wants in / yield" flag
        respectively (TODO confirm against the sys-values layout).  Full
        memory barriers fence each flag transition.

        Returns True when this side acquired the turn, False when the peer is
        (or became) in charge.
        """
        if self._create:
            write_uint64(self.base_address, self.sys_values_offset + 56, 0)
            write_uint64(self.base_address, self.sys_values_offset + 72, 1)
            full_memory_barrier()
            if self.consumer_in_charge():
                return False
            else:
                # Claim the turn, then drop the intent flag.
                write_uint64(self.base_address, self.sys_values_offset + 56, 1)
                full_memory_barrier()
                write_uint64(self.base_address, self.sys_values_offset + 72, 0)
                full_memory_barrier()
                self.update_free_memory_search_start()
                if self.consumer_in_charge():
                    # Lost the race after claiming: roll back and yield.
                    write_uint64(self.base_address, self.sys_values_offset + 56, 0)
                    full_memory_barrier()
                    write_uint64(self.base_address, self.sys_values_offset + 72, 1)
                    full_memory_barrier()
                    return False

                return True
        else:
            # Mirror of the branch above with the consumer-side flag offsets.
            write_uint64(self.base_address, self.sys_values_offset + 64, 0)
            write_uint64(self.base_address, self.sys_values_offset + 80, 1)
            full_memory_barrier()
            if self.creator_in_charge():
                return False
            else:
                write_uint64(self.base_address, self.sys_values_offset + 64, 1)
                full_memory_barrier()
                write_uint64(self.base_address, self.sys_values_offset + 80, 0)
                full_memory_barrier()
                self.update_free_memory_search_start()
                if self.creator_in_charge():
                    write_uint64(self.base_address, self.sys_values_offset + 64, 0)
                    full_memory_barrier()
                    write_uint64(self.base_address, self.sys_values_offset + 80, 1)
                    full_memory_barrier()
                    return False
                
                return True
6671            
    def release(self):
        """Release the turn acquired via ``get_in_line``/``wait_my_turn``.

        Commits the free-memory search start first, then clears both of this
        side's flags (creator: +56/+72; consumer: +64/+80) followed by a full
        memory barrier so the peer observes the release.
        """
        self.commit_free_memory_search_start()
        if self._create:
            write_uint64(self.base_address, self.sys_values_offset + 56, 0)
            write_uint64(self.base_address, self.sys_values_offset + 72, 0)
            full_memory_barrier()
        else:
            write_uint64(self.base_address, self.sys_values_offset + 64, 0)
            write_uint64(self.base_address, self.sys_values_offset + 80, 0)
            full_memory_barrier()
6682
6683    def wait_my_turn(self, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001) -> bool:
6684        start_time = cpu_clock()
6685        while not self.get_in_line():
6686            if time_limit is not None:
6687                if (cpu_clock() - start_time) > time_limit:
6688                    return False
6689            
6690            if periodic_sleep_time is None:
6691                mm_pause()
6692            else:
6693                hps_sleep(periodic_sleep_time)
6694        
6695        return True
6696
6697    async def await_my_turn(self, time_limit: Optional[RationalNumber] = None) -> bool:
6698        start_time = cpu_clock()
6699        while not self.get_in_line():
6700            if time_limit is not None:
6701                if (cpu_clock() - start_time) > time_limit:
6702                    return False
6703            
6704            await self._asleep_func()
6705        
6706        return True
6707
6708    # ----------------------------
6709
6710    def wait_for_messages(self, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001) -> bool:
6711        start_time = cpu_clock()
6712        has_messages = False
6713        while not has_messages:
6714            if time_limit is not None:
6715                if (cpu_clock() - start_time) > time_limit:
6716                    return False
6717            
6718            if periodic_sleep_time is None:
6719                mm_pause()
6720            else:
6721                hps_sleep(periodic_sleep_time)
6722
6723            with wait_my_turn(self):
6724                has_messages = self.has_messages()
6725        
6726        return True
6727
6728    async def await_for_messages(self, time_limit: Optional[RationalNumber] = None) -> bool:
6729        start_time = cpu_clock()
6730        has_messages = False
6731        while not has_messages:
6732            if time_limit is not None:
6733                if (cpu_clock() - start_time) > time_limit:
6734                    return False
6735            
6736            await self._asleep_func()
6737
6738            with await_my_turn(self, time_limit):
6739                has_messages = self.has_messages()
6740        
6741        return True
6742
6743    # ----------------------------
6744
    @staticmethod
    def _get_obj_type(obj: Any) -> ObjectType:
        """Map a Python object to the ObjectType tag used for codec dispatch.

        Branch order matters: exact-type (``is``) checks come first, then
        ``issubclass`` checks ordered from most to least specific, then the
        pluggable ``obj_type_map`` registry, and finally the static-object
        fallbacks.
        """
        obj_type = type(obj)
        # --- exact builtin types ---
        if obj is None:
            obj_type_atom: ObjectType = ObjectType.tnone
        elif obj_type is bool:
            obj_type_atom = ObjectType.tbool
        elif obj_type is int:
            obj_type_atom = ObjectType.tint
        elif obj_type is float:
            obj_type_atom = ObjectType.tfloat
        elif obj_type is complex:
            obj_type_atom = ObjectType.tcomplex
        elif obj_type is Decimal:
            obj_type_atom = ObjectType.tdecimal
        elif obj_type is slice:
            obj_type_atom = ObjectType.tslice
        elif obj_type is str:
            obj_type_atom = ObjectType.tstr
        elif obj_type is bytes:
            obj_type_atom = ObjectType.tbytes
        elif obj_type is bytearray:
            obj_type_atom = ObjectType.tbytearray
        elif obj_type is tuple:
            obj_type_atom = ObjectType.ttuple
        elif obj_type is list:
            obj_type_atom = ObjectType.tlist
        # All datetime-family types share a single codec.
        elif obj_type in {datetime, timedelta, timezone, date, time}:
            obj_type_atom = ObjectType.tdatetime
        # --- subclass checks, most specific first ---
        elif issubclass(obj_type, FastLimitedSet):
            obj_type_atom = ObjectType.tfastset
        elif issubclass(obj_type, AbsMutableSet):
            obj_type_atom = ObjectType.tmutableset
        elif issubclass(obj_type, AbsSet):
            obj_type_atom = ObjectType.tset
        elif issubclass(obj_type, FastLimitedDict):
            obj_type_atom = ObjectType.tfastdict
        elif issubclass(obj_type, ForceMapping):
            obj_type_atom = ObjectType.tmapping
        elif issubclass(obj_type, AbsMutableMapping):
            obj_type_atom = ObjectType.tmutablemapping
        elif issubclass(obj_type, AbsMapping):
            obj_type_atom = ObjectType.tmapping
        elif obj_type is SmallInt:
            obj_type_atom = ObjectType.tsmallint
        elif obj_type is BigInt:
            obj_type_atom = ObjectType.tbigint
        elif issubclass(obj_type, Tensor):
            obj_type_atom = ObjectType.ttorchtensor
        elif issubclass(obj_type, np.ndarray):
            obj_type_atom = ObjectType.tnumpyndarray
        elif issubclass(obj_type, (ForceGeneralObjectCopy, ForceGeneralObjectInplace)):
            obj_type_atom = ObjectType.tgeneralobject
        elif issubclass(obj_type, (ForceStaticObjectCopy, ForceStaticObjectInplace)):
            obj_type_atom = ObjectType.tstaticobject
        # --- pluggable registry ---
        elif obj_type in obj_type_map:
            obj_type_atom = obj_type_map[obj_type]
        # elif hasattr(obj, '__dict__'):
        #     obj_type_atom = ObjectType.tgeneralobject
        # else:
        #     obj_type_atom = ObjectType.tpickable
        # --- fallbacks: slots-only (or attribute-less) objects vs. objects
        # with a __dict__ ---
        elif hasattr(obj, '__slots__') or ((not hasattr(obj, '__slots__')) and (not hasattr(obj, '__dict__'))):
            obj_type_atom = ObjectType.tstaticobjectwithslots
        else:
            # obj_type_atom = ObjectType.tgeneralobject
            obj_type_atom = ObjectType.tstaticobject
        
        return obj_type_atom
6813
6814
6815# @contextmanager
6816# def get_in_line(shared_memory: SharedMemory):
6817#     shared_memory.get_in_line()
6818#     try:
6819#         yield
6820#     finally:
6821#         shared_memory.release()
6822
6823
class GetInLine:
    """Context manager: enter the critical section via ``get_in_line()``.

    ``release()`` is always called on exit, even when the body raised.
    """

    def __init__(self, shared_memory: SharedMemory):
        self.shared_memory: SharedMemory = shared_memory

    def __enter__(self):
        # NOTE(review): get_in_line() can return False (turn not acquired);
        # that result is ignored here, mirroring the original helper.
        self.shared_memory.get_in_line()
        return None

    def __exit__(self, exc_kind, exc_instance, exc_traceback):
        self.shared_memory.release()


# Lower-case alias kept for callers of the old @contextmanager-style helper.
get_in_line = GetInLine
6837
6838
6839# @contextmanager
6840# def wait_my_turn(shared_memory: SharedMemory, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001):
6841#     shared_memory.wait_my_turn(time_limit, periodic_sleep_time)
6842#     try:
6843#         yield
6844#     finally:
6845#         shared_memory.release()
6846
6847
class WaitMyTurn:
    """Context manager: block until this side owns the turn; release on exit."""

    def __init__(self, shared_memory: SharedMemory, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001):
        self.shared_memory: SharedMemory = shared_memory
        self.time_limit: Optional[RationalNumber] = time_limit
        self.periodic_sleep_time: Optional[RationalNumber] = periodic_sleep_time

    def __enter__(self):
        # NOTE(review): a timeout (wait_my_turn() -> False) is silently
        # ignored here, mirroring the original helper.
        self.shared_memory.wait_my_turn(self.time_limit, self.periodic_sleep_time)
        return None

    def __exit__(self, exc_kind, exc_instance, exc_traceback):
        self.shared_memory.release()


# Lower-case alias kept for callers of the old @contextmanager-style helper.
wait_my_turn = WaitMyTurn
6863
6864
6865# @contextmanager
6866# def wait_my_turn_when_has_messages(shared_memory: SharedMemory, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001):
6867#     while True:
6868#         if not shared_memory.wait_my_turn(time_limit, periodic_sleep_time):
6869#             raise OperationTimedOutError
6870        
6871#         try:
6872#             if not shared_memory.has_messages():
6873#                 continue
6874
6875#             yield
6876#             break
6877#         finally:
6878#             shared_memory.release()
6879
6880
class WaitMyTurnWhenHasMessages:
    """Context manager that keeps re-acquiring the turn until the queue has messages.

    Raises:
        OperationTimedOutError: when ``wait_my_turn`` reports a timeout
            (returns a falsy value).
    """

    def __init__(self, shared_memory: 'SharedMemory', time_limit: 'Optional[RationalNumber]' = None, periodic_sleep_time: 'Optional[RationalNumber]' = 0.000000001):
        self.shared_memory: 'SharedMemory' = shared_memory
        self.time_limit: 'Optional[RationalNumber]' = time_limit
        self.periodic_sleep_time: 'Optional[RationalNumber]' = periodic_sleep_time

    def __enter__(self):
        mem = self.shared_memory
        while True:
            got_turn = mem.wait_my_turn(self.time_limit, self.periodic_sleep_time)
            if not got_turn:
                raise OperationTimedOutError

            if mem.has_messages():
                return

            # No messages yet: give the turn back and queue up again.
            mem.release()

    def __exit__(self, exc_type, exc_value, traceback):
        self.shared_memory.release()
6899
6900
wait_my_turn_when_has_messages = WaitMyTurnWhenHasMessages  # lower-case alias preserving the name of the retired @contextmanager-based API
6902
6903
class await_my_turn:
    """Async context manager counterpart of WaitMyTurn (no ``periodic_sleep_time`` knob)."""

    def __init__(self, shared_memory: 'SharedMemory', time_limit: 'Optional[RationalNumber]' = None):
        self.shared_memory: 'SharedMemory' = shared_memory
        self.time_limit: 'Optional[RationalNumber]' = time_limit

    async def __aenter__(self):
        mem = self.shared_memory
        await mem.await_my_turn(self.time_limit)

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        # Release is synchronous even in the async variant.
        self.shared_memory.release()
6914
6915
def numpy_array_memory_size(np_shape, np_dtype) -> int:
    """Return the number of bytes needed to store a numpy array of the given shape and dtype.

    Args:
        np_shape: shape tuple (an empty tuple means a 0-d array: one element).
        np_dtype: anything accepted by ``np.dtype`` (dtype instance, type, or string).

    Returns:
        Plain ``int`` byte count.
    """
    # int(...) because np.prod returns a numpy scalar — and a float64 (1.0)
    # for an empty shape — while callers expect a plain Python int.
    num_elements: int = int(np.prod(np_shape))
    element_size: int = np.dtype(np_dtype).itemsize
    return num_elements * element_size
6921
6922
def numpy_array_made_from_pointer_memory_size(np_shape, ctypes_type) -> int:
    """Return the byte size of a numpy array of shape ``np_shape`` backed by ``ctypes_type`` elements.

    Args:
        np_shape: shape tuple (an empty tuple means a 0-d array: one element).
        ctypes_type: a ctypes type (e.g. ``ctypes.c_double``) whose ``sizeof`` is the element size.

    Returns:
        Plain ``int`` byte count (the declared return type; previously a numpy
        scalar leaked out because ``np.prod`` does not return a Python int).
    """
    num_elements: int = int(np.prod(np_shape))
    element_size: int = ctypes.sizeof(ctypes_type)
    return num_elements * element_size
6928
6929
6930from ctypes import _SimpleCData
6931
def make_numpy_array_from_obj_offset(shared_memory: 'SharedMemory', offset: 'Offset', np_shape, np_dtype_or_ctypes_type=None) -> 'Any':
    """Create a numpy ndarray view over the data buffer of an object stored in shared memory.

    Args:
        shared_memory: SharedMemory instance that owns the buffer.
        offset: offset of the stored object whose payload buffer is viewed.
        np_shape: desired array shape.
        np_dtype_or_ctypes_type: either a ctypes simple type (pointer-based view)
            or a numpy dtype (buffer-based view). Defaults to ``ctypes.c_uint8``.

    Returns:
        ``np.ndarray`` viewing the shared-memory payload (no copy).

    Raises:
        ObjBufferIsSmallerThanRequestedNumpyArrayError: if the object's buffer is
            smaller than the requested ctypes-typed array.
    """
    if np_dtype_or_ctypes_type is None:
        np_dtype_or_ctypes_type = ctypes.c_uint8

    data_offset, data_size = shared_memory.get_obj_buffer_2(offset)
    # FIX: ctypes types are classes, so the original `isinstance(..., _SimpleCData)`
    # was always False and ctypes arguments silently fell through to the
    # numpy-dtype branch; a subclass check is needed to reach the pointer path.
    if isinstance(np_dtype_or_ctypes_type, type) and issubclass(np_dtype_or_ctypes_type, _SimpleCData):
        num_elements = int(np.prod(np_shape))
        np_array_size = num_elements * ctypes.sizeof(np_dtype_or_ctypes_type)
        if data_size < np_array_size:
            raise ObjBufferIsSmallerThanRequestedNumpyArrayError(data_size, np_array_size)

        data_address = shared_memory.base_address + data_offset
        void_ptr = ctypes.c_void_p(data_address)
        actual_ptr = ctypes.cast(void_ptr, ctypes.POINTER(np_dtype_or_ctypes_type))
        return np.ctypeslib.as_array(actual_ptr, shape=np_shape)
    else:
        return np.ndarray(np_shape, dtype=np_dtype_or_ctypes_type, buffer=shared_memory.mem_view(data_offset, data_size))
6950
6951
def zero_bytes_from_numpy_array(np: 'np.ndarray') -> bytes:
    """Return a zero-filled bytes object with the same byte length as the given array.

    NOTE: the parameter is (unfortunately) named ``np``, shadowing the numpy
    module inside this function body; kept for backward compatibility.
    """
    return b"\x00" * np.nbytes
6954
6955
def bytes_from_numpy_array(np: 'np.ndarray') -> bytes:
    """Copy the raw contents of the array into an immutable bytes object (C order).

    NOTE: the parameter is (unfortunately) named ``np``, shadowing the numpy
    module inside this function body; kept for backward compatibility.
    """
    raw: bytes = np.tobytes()
    return raw
6958
6959
def dict_to_list(mapping: 'AbsMapping') -> 'List':
    """Convert a mapping with non-negative integer keys into a list indexed by those keys.

    Indices absent from the mapping are filled with ``None``.

    Args:
        mapping: mapping whose keys are non-negative ints usable as list indices.

    Returns:
        List of length ``max(keys) + 1`` (empty list for an empty mapping).
    """
    if not mapping:
        return []  # max() on an empty mapping would raise ValueError

    # FIX: the largest key must itself be addressable, so the list needs
    # max(keys) + 1 slots; the original allocated only max(keys) and crashed
    # with IndexError when assigning the last key.
    result = [None] * (max(mapping.keys()) + 1)
    for key, value in mapping.items():
        result[key] = value

    return result
6967
6968
def list_to_dict(data_list: 'List') -> 'Dict':
    """Build a dict mapping each item's index to the item (inverse of dict_to_list)."""
    return dict(enumerate(data_list))
6971
6972
def intenum_dict_to_list(mapping: 'AbsMapping', int_enum_class: 'Optional[Type]' = None) -> 'List':
    """Convert a mapping keyed by IntEnum members (or plain ints) into a list indexed by ``int(key)``.

    Sizing rules:
        * ``int_enum_class`` given: the list has one slot per enum member.
        * otherwise, if any key is an IntEnum member, its enum class sets the length.
        * otherwise the list is sized to hold the largest integer key.

    Indices absent from the mapping are filled with ``None``.

    Args:
        mapping: mapping with int-convertible keys.
        int_enum_class: optional IntEnum subclass that fixes the list length.

    Returns:
        List with values placed at ``int(key)``; empty list for an empty mapping.
    """
    if int_enum_class is not None:
        items_num = len(int_enum_class)
    else:
        items_num = None
        for key in mapping.keys():
            if issubclass(type(key), IntEnum):
                items_num = len(type(key))
                break  # the first IntEnum key decides the sizing; no need to scan the rest

        if items_num is None:
            # FIX: plain-int fallback needs max+1 slots (the original used
            # max(...) without +1 and raised IndexError on the largest key),
            # and an empty mapping must not reach max() (ValueError).
            items_num = (int(max(mapping.keys(), key=lambda value: int(value))) + 1) if mapping else 0

    result = [None] * items_num
    for key, value in mapping.items():
        result[int(key)] = value

    return result
6992
6993
def intenum_list_to_dict(data_list: 'List', int_enum_class: 'Optional[Type]' = None) -> 'Dict':
    """Inverse of intenum_dict_to_list: map each index to its item.

    When ``int_enum_class`` is provided (and truthy), indices are wrapped into
    that enum; otherwise plain int keys are used.
    """
    if int_enum_class:
        return {int_enum_class(index): item for index, item in enumerate(data_list)}

    return dict(enumerate(data_list))
DEBUG = False
current_shared_memory_instance: SharedMemory = None
class QueueType(enum.IntEnum):
116class QueueType(IntEnum):
117    fifo = 0
118    lifo = 1

An enumeration.

fifo = <QueueType.fifo: 0>
lifo = <QueueType.lifo: 1>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class ObjectType(enum.IntEnum):
121class ObjectType(IntEnum):
122    tfree_memory = 0
123    tmessage = 1
124    tnone = 2
125    tbool = 3
126    tint = 4
127    tfloat = 5
128    tcomplex = 6
129    tstr = 7
130    tbytes = 8
131    tbytearray = 9
132    ttuple = 10
133    tlist = 11
134    tmutableset = 12
135    tset = 13
136    tmutablemapping = 14
137    tmapping = 15
138    tfastdict = 16
139    tclass = 17
140    tpickable = 18
141    tinternal_list = 19
142    tsmallint = 20
143    tbigint = 21
144    tgeneralobject = 22
145    tnumpyndarray = 23
146    ttorchtensor = 24
147    tstaticobject = 25
148    tfastset = 26
149    tslice = 27
150    tdecimal = 28
151    tdatetime = 29
152    tstaticobjectwithslots = 30

An enumeration.

tfree_memory = <ObjectType.tfree_memory: 0>
tmessage = <ObjectType.tmessage: 1>
tnone = <ObjectType.tnone: 2>
tbool = <ObjectType.tbool: 3>
tint = <ObjectType.tint: 4>
tfloat = <ObjectType.tfloat: 5>
tcomplex = <ObjectType.tcomplex: 6>
tstr = <ObjectType.tstr: 7>
tbytes = <ObjectType.tbytes: 8>
tbytearray = <ObjectType.tbytearray: 9>
ttuple = <ObjectType.ttuple: 10>
tlist = <ObjectType.tlist: 11>
tmutableset = <ObjectType.tmutableset: 12>
tset = <ObjectType.tset: 13>
tmutablemapping = <ObjectType.tmutablemapping: 14>
tmapping = <ObjectType.tmapping: 15>
tfastdict = <ObjectType.tfastdict: 16>
tclass = <ObjectType.tclass: 17>
tpickable = <ObjectType.tpickable: 18>
tinternal_list = <ObjectType.tinternal_list: 19>
tsmallint = <ObjectType.tsmallint: 20>
tbigint = <ObjectType.tbigint: 21>
tgeneralobject = <ObjectType.tgeneralobject: 22>
tnumpyndarray = <ObjectType.tnumpyndarray: 23>
ttorchtensor = <ObjectType.ttorchtensor: 24>
tstaticobject = <ObjectType.tstaticobject: 25>
tfastset = <ObjectType.tfastset: 26>
tslice = <ObjectType.tslice: 27>
tdecimal = <ObjectType.tdecimal: 28>
tdatetime = <ObjectType.tdatetime: 29>
tstaticobjectwithslots = <ObjectType.tstaticobjectwithslots: 30>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class SysValuesOffsets(enum.IntEnum):
155class SysValuesOffsets(IntEnum):
156    total_mem_size = 0
157    data_start_offset = 1
158    data_size = 2
159    data_end_offset = 3
160    free_memory_search_start = 4
161    first_message_offset = 5
162    last_message_offset = 6
163    creator_in_charge = 7
164    consumer_in_charge = 8
165    creator_wants_to_be_in_charge = 9
166    consumer_wants_to_be_in_charge = 10
167    creator_ready = 11
168    consumer_ready = 12

An enumeration.

total_mem_size = <SysValuesOffsets.total_mem_size: 0>
data_start_offset = <SysValuesOffsets.data_start_offset: 1>
data_size = <SysValuesOffsets.data_size: 2>
data_end_offset = <SysValuesOffsets.data_end_offset: 3>
free_memory_search_start = <SysValuesOffsets.free_memory_search_start: 4>
first_message_offset = <SysValuesOffsets.first_message_offset: 5>
last_message_offset = <SysValuesOffsets.last_message_offset: 6>
creator_in_charge = <SysValuesOffsets.creator_in_charge: 7>
consumer_in_charge = <SysValuesOffsets.consumer_in_charge: 8>
creator_wants_to_be_in_charge = <SysValuesOffsets.creator_wants_to_be_in_charge: 9>
consumer_wants_to_be_in_charge = <SysValuesOffsets.consumer_wants_to_be_in_charge: 10>
creator_ready = <SysValuesOffsets.creator_ready: 11>
consumer_ready = <SysValuesOffsets.consumer_ready: 12>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
Offset = <class 'int'>
Size = <class 'int'>
minimal_memory_block_size = 8
block_size = 8
bs = 8
class SharedMemoryError(builtins.Exception):
178class SharedMemoryError(Exception):
179    pass

Common base class for all non-exit exceptions.

Inherited Members
builtins.Exception
Exception
builtins.BaseException
with_traceback
args
class OperationTimedOutError(SharedMemoryError):
182class OperationTimedOutError(SharedMemoryError):
183    pass

Common base class for all non-exit exceptions.

Inherited Members
builtins.Exception
Exception
builtins.BaseException
with_traceback
args
class FreeMemoryChunkNotFoundError(SharedMemoryError):
186class FreeMemoryChunkNotFoundError(SharedMemoryError):
187    """Indicates that an unpartitioned chunk of free memory of requested size not being found.
188
189        Regarding this error, it’s important to adjust the size parameter in the SharedMemory configuration. Trying to estimate memory consumption down to the byte is not practical because it fails to account for the memory overhead required by each entity stored (such as entity type metadata, pointers to child entities, etc.).
190
191        When setting the size parameter for SharedMemory, consider using broader units like tens (for embedded systems), hundreds, or thousands of megabytes, rather than precise byte counts. This approach is similar to how you would not precisely calculate the amount of memory needed for a web server hosted externally; you make an educated guess, like assuming that 256 MB might be insufficient but 768 MB could be adequate, and then adjust based on practical testing.
192
193        Also, be aware of memory fragmentation, which affects all memory allocation systems, including the OS itself. For example, if you have a SharedMemory pool sized to store exactly ten 64-bit integers, accounting for additional bytes for system information, your total might be around 200 bytes. Initially, after storing the integers, your memory might appear as ["int", "int", ..., "int"]. If you delete every second integer, the largest contiguous free memory chunk could be just 10 bytes, despite having 50 bytes free in total. This fragmentation means you cannot store a larger data structure like a 20-byte string which needs contiguous space.
194
195        To resolve this, simply increase the size parameter value of SharedMemory. This is akin to how you would manage memory allocation for server hosting or thread stack sizes in software development.
196
197    Args:
198        SharedMemoryError (_type_): _description_
199    """
200    pass

Indicates that an unpartitioned chunk of free memory of the requested size was not found.

Regarding this error, it’s important to adjust the size parameter in the SharedMemory configuration. Trying to estimate memory consumption down to the byte is not practical because it fails to account for the memory overhead required by each entity stored (such as entity type metadata, pointers to child entities, etc.).

When setting the size parameter for SharedMemory, consider using broader units like tens (for embedded systems), hundreds, or thousands of megabytes, rather than precise byte counts. This approach is similar to how you would not precisely calculate the amount of memory needed for a web server hosted externally; you make an educated guess, like assuming that 256 MB might be insufficient but 768 MB could be adequate, and then adjust based on practical testing.

Also, be aware of memory fragmentation, which affects all memory allocation systems, including the OS itself. For example, if you have a SharedMemory pool sized to store exactly ten 64-bit integers, accounting for additional bytes for system information, your total might be around 200 bytes. Initially, after storing the integers, your memory might appear as ["int", "int", ..., "int"]. If you delete every second integer, the largest contiguous free memory chunk could be just 10 bytes, despite having 50 bytes free in total. This fragmentation means you cannot store a larger data structure like a 20-byte string which needs contiguous space.

To resolve this, simply increase the size parameter value of SharedMemory. This is akin to how you would manage memory allocation for server hosting or thread stack sizes in software development.

Inherits from: SharedMemoryError.

Inherited Members
builtins.Exception
Exception
builtins.BaseException
with_traceback
args
class ObjBufferIsSmallerThanRequestedNumpyArrayError(SharedMemoryError):
203class ObjBufferIsSmallerThanRequestedNumpyArrayError(SharedMemoryError):
204    pass

Common base class for all non-exit exceptions.

Inherited Members
builtins.Exception
Exception
builtins.BaseException
with_traceback
args
class WrongObjectTypeError(SharedMemoryError):
207class WrongObjectTypeError(SharedMemoryError):
208    pass

Common base class for all non-exit exceptions.

Inherited Members
builtins.Exception
Exception
builtins.BaseException
with_traceback
args
class NoMessagesInQueueError(SharedMemoryError):
211class NoMessagesInQueueError(SharedMemoryError):
212    pass

Common base class for all non-exit exceptions.

Inherited Members
builtins.Exception
Exception
builtins.BaseException
with_traceback
args
def nearest_size(size: int) -> int:
215def nearest_size(size: Size) -> Size:
216    return ((size // 8) * 8 + 8) if size % 8 else size
def nsize(size: int) -> int:
215def nearest_size(size: Size) -> Size:
216    return ((size // 8) * 8 + 8) if size % 8 else size
class BaseIObject:
222class BaseIObject:
223    pass
class BaseObjOffsets(enum.IntEnum):
227class BaseObjOffsets(IntEnum):
228    obj_type = 0
229    obj_size = 1

An enumeration.

obj_type = <BaseObjOffsets.obj_type: 0>
obj_size = <BaseObjOffsets.obj_size: 1>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
BaseObjOffsetsLen: int = 2
bsBaseObjOffsetsLen: int = 16
class TBase:
236class TBase:
237    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: Any) -> Tuple[Any, Offset, Size]:
238        raise NotImplementedError
239    
240    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> Any:
241        raise NotImplementedError
242    
243    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
244        raise NotImplementedError
245    
246    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
247        raise NotImplementedError
248    
249    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
250        raise NotImplementedError
def map_to_shared_memory( self, shared_memory: SharedMemory, obj: typing.Any) -> Tuple[Any, int, int]:
237    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: Any) -> Tuple[Any, Offset, Size]:
238        raise NotImplementedError
def init_from_shared_memory( self, shared_memory: SharedMemory, offset: int) -> Any:
240    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> Any:
241        raise NotImplementedError
def destroy( self, shared_memory: SharedMemory, offset: int):
243    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
244        raise NotImplementedError
def buffer( self, shared_memory: SharedMemory, offset: int) -> memoryview:
246    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
247        raise NotImplementedError
def buffer_2( self, shared_memory: SharedMemory, offset: int) -> Tuple[int, int]:
249    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
250        raise NotImplementedError
class TNone:
257class TNone:
258    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: None) -> Tuple[None, Offset, Size]:
259        offset, real_size = shared_memory.malloc(ObjectType.tnone, 0)
260        return obj, offset, real_size
261    
262    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
263        if ObjectType.tnone != read_uint64(shared_memory.base_address, offset):
264            raise WrongObjectTypeError
265
266        return None
267    
268    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
269        if ObjectType.tnone != read_uint64(shared_memory.base_address, offset):
270            raise WrongObjectTypeError
271
272        shared_memory.free(offset)
def map_to_shared_memory( self, shared_memory: SharedMemory, obj: None) -> Tuple[NoneType, int, int]:
258    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: None) -> Tuple[None, Offset, Size]:
259        offset, real_size = shared_memory.malloc(ObjectType.tnone, 0)
260        return obj, offset, real_size
def init_from_shared_memory( self, shared_memory: SharedMemory, offset: int) -> None:
262    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
263        if ObjectType.tnone != read_uint64(shared_memory.base_address, offset):
264            raise WrongObjectTypeError
265
266        return None
def destroy( self, shared_memory: SharedMemory, offset: int):
268    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
269        if ObjectType.tnone != read_uint64(shared_memory.base_address, offset):
270            raise WrongObjectTypeError
271
272        shared_memory.free(offset)
class IntOffsets(enum.IntEnum):
279class IntOffsets(IntEnum):
280    data = 0

An enumeration.

data = <IntOffsets.data: 0>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class TInt:
283class TInt:
284    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: int) -> Tuple[int, Offset, Size]:
285        offset, real_size = shared_memory.malloc(ObjectType.tint, 8)
286        write_int64(shared_memory.base_address, offset + 16 + 0, obj)
287        return obj, offset, real_size
288    
289    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> int:
290        if ObjectType.tint != read_uint64(shared_memory.base_address, offset + 0):
291            raise WrongObjectTypeError
292
293        return read_int64(shared_memory.base_address, offset + 16 + 0)
294    
295    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
296        if ObjectType.tint != read_uint64(shared_memory.base_address, offset + 0):
297            raise WrongObjectTypeError
298
299        shared_memory.free(offset)
def map_to_shared_memory( self, shared_memory: SharedMemory, obj: int) -> Tuple[int, int, int]:
284    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: int) -> Tuple[int, Offset, Size]:
285        offset, real_size = shared_memory.malloc(ObjectType.tint, 8)
286        write_int64(shared_memory.base_address, offset + 16 + 0, obj)
287        return obj, offset, real_size
def init_from_shared_memory( self, shared_memory: SharedMemory, offset: int) -> int:
289    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> int:
290        if ObjectType.tint != read_uint64(shared_memory.base_address, offset + 0):
291            raise WrongObjectTypeError
292
293        return read_int64(shared_memory.base_address, offset + 16 + 0)
def destroy( self, shared_memory: SharedMemory, offset: int):
295    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
296        if ObjectType.tint != read_uint64(shared_memory.base_address, offset + 0):
297            raise WrongObjectTypeError
298
299        shared_memory.free(offset)
class SmallInt(builtins.int):
306class SmallInt(int):
307    ...

int([x]) -> integer int(x, base=10) -> integer

Convert a number or string to an integer, or return 0 if no arguments are given. If x is a number, return x.__int__(). For floating point numbers, this truncates towards zero.

If x is not a number or if base is given, then x must be a string, bytes, or bytearray instance representing an integer literal in the given base. The literal can be preceded by '+' or '-' and be surrounded by whitespace. The base defaults to 10. Valid bases are 0 and 2-36. Base 0 means to interpret the base from the string as an integer literal.

>>> int('0b100', base=0)
4
Inherited Members
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
smallint = <class 'SmallInt'>
sint = <class 'SmallInt'>
class SmallIntOffsets(enum.IntEnum):
314class SmallIntOffsets(IntEnum):
315    data = 0

An enumeration.

data = <SmallIntOffsets.data: 0>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class TSmallInt:
318class TSmallInt:
319    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: int) -> Tuple[int, Offset, Size]:
320        offset, real_size = shared_memory.malloc(ObjectType.tsmallint, 8)
321        write_int64(shared_memory.base_address, offset + 16 + 0, obj)
322        return obj, offset, real_size
323    
324    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> int:
325        if ObjectType.tsmallint != read_uint64(shared_memory.base_address, offset + 0):
326            raise WrongObjectTypeError
327
328        return read_int64(shared_memory.base_address, offset + 16 + 0)
329    
330    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
331        if ObjectType.tsmallint != read_uint64(shared_memory.base_address, offset + 0):
332            raise WrongObjectTypeError
333
334        shared_memory.free(offset)
def map_to_shared_memory( self, shared_memory: SharedMemory, obj: int) -> Tuple[int, int, int]:
319    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: int) -> Tuple[int, Offset, Size]:
320        offset, real_size = shared_memory.malloc(ObjectType.tsmallint, 8)
321        write_int64(shared_memory.base_address, offset + 16 + 0, obj)
322        return obj, offset, real_size
def init_from_shared_memory( self, shared_memory: SharedMemory, offset: int) -> int:
324    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> int:
325        if ObjectType.tsmallint != read_uint64(shared_memory.base_address, offset + 0):
326            raise WrongObjectTypeError
327
328        return read_int64(shared_memory.base_address, offset + 16 + 0)
def destroy( self, shared_memory: SharedMemory, offset: int):
330    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
331        if ObjectType.tsmallint != read_uint64(shared_memory.base_address, offset + 0):
332            raise WrongObjectTypeError
333
334        shared_memory.free(offset)
class BigInt(builtins.int):
341class BigInt(int):
342    ...

int([x]) -> integer int(x, base=10) -> integer

Convert a number or string to an integer, or return 0 if no arguments are given. If x is a number, return x.__int__(). For floating point numbers, this truncates towards zero.

If x is not a number or if base is given, then x must be a string, bytes, or bytearray instance representing an integer literal in the given base. The literal can be preceded by '+' or '-' and be surrounded by whitespace. The base defaults to 10. Valid bases are 0 and 2-36. Base 0 means to interpret the base from the string as an integer literal.

>>> int('0b100', base=0)
4
Inherited Members
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
bigint = <class 'BigInt'>
bint = <class 'BigInt'>
class BigIntOffsets(enum.IntEnum):
349class BigIntOffsets(IntEnum):
350    data_size = 0
351    data = 1

An enumeration.

data_size = <BigIntOffsets.data_size: 0>
data = <BigIntOffsets.data: 1>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class TBigInt:
354class TBigInt:
355    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: int) -> Tuple[int, Offset, Size]:
356        data = bint_to_bytes(obj)
357        data_size = len(data)
358        # offset, real_size = shared_memory.malloc(ObjectType.tbigint, 16 + 8 * data_size)
359        offset, real_size = shared_memory.malloc(ObjectType.tbigint, 16 + data_size)
360        write_uint64(shared_memory.base_address, offset + 16 + 0, data_size)
361        data_offset = offset + 16 + 8
362        shared_memory._shared_memory.buf[data_offset:data_offset + data_size] = data
363        return obj, offset, real_size
364    
365    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> int:
366        if ObjectType.tbigint != read_uint64(shared_memory.base_address, offset + 0):
367            raise WrongObjectTypeError
368
369        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
370        if data_size:
371            data_offset = offset + 16 + 8
372            data = bytes(shared_memory._shared_memory.buf[data_offset:data_offset + data_size])
373            return bytes_to_bint(data)
374        else:
375            return 0
376    
377    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
378        if ObjectType.tbigint != read_uint64(shared_memory.base_address, offset + 0):
379            raise WrongObjectTypeError
380
381        shared_memory.free(offset)
382    
383    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
384        if ObjectType.tbigint != read_uint64(shared_memory.base_address, offset + 0):
385            raise WrongObjectTypeError
386
387        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
388        data_offset = offset + 16 + 8
389        return shared_memory._shared_memory.buf[data_offset:data_offset + data_size]
390    
391    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
392        if ObjectType.tbigint != read_uint64(shared_memory.base_address, offset + 0):
393            raise WrongObjectTypeError
394
395        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
396        data_offset = offset + 16 + 8
397        return data_offset, data_size
def map_to_shared_memory( self, shared_memory: SharedMemory, obj: int) -> Tuple[int, int, int]:
355    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: int) -> Tuple[int, Offset, Size]:
356        data = bint_to_bytes(obj)
357        data_size = len(data)
358        # offset, real_size = shared_memory.malloc(ObjectType.tbigint, 16 + 8 * data_size)
359        offset, real_size = shared_memory.malloc(ObjectType.tbigint, 16 + data_size)
360        write_uint64(shared_memory.base_address, offset + 16 + 0, data_size)
361        data_offset = offset + 16 + 8
362        shared_memory._shared_memory.buf[data_offset:data_offset + data_size] = data
363        return obj, offset, real_size
def init_from_shared_memory( self, shared_memory: SharedMemory, offset: int) -> int:
365    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> int:
366        if ObjectType.tbigint != read_uint64(shared_memory.base_address, offset + 0):
367            raise WrongObjectTypeError
368
369        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
370        if data_size:
371            data_offset = offset + 16 + 8
372            data = bytes(shared_memory._shared_memory.buf[data_offset:data_offset + data_size])
373            return bytes_to_bint(data)
374        else:
375            return 0
def destroy( self, shared_memory: SharedMemory, offset: int):
377    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
378        if ObjectType.tbigint != read_uint64(shared_memory.base_address, offset + 0):
379            raise WrongObjectTypeError
380
381        shared_memory.free(offset)
def buffer( self, shared_memory: SharedMemory, offset: int) -> memoryview:
383    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
384        if ObjectType.tbigint != read_uint64(shared_memory.base_address, offset + 0):
385            raise WrongObjectTypeError
386
387        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
388        data_offset = offset + 16 + 8
389        return shared_memory._shared_memory.buf[data_offset:data_offset + data_size]
def buffer_2( self, shared_memory: SharedMemory, offset: int) -> Tuple[int, int]:
391    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
392        if ObjectType.tbigint != read_uint64(shared_memory.base_address, offset + 0):
393            raise WrongObjectTypeError
394
395        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
396        data_offset = offset + 16 + 8
397        return data_offset, data_size
class BoolOffsets(enum.IntEnum):
404class BoolOffsets(IntEnum):
405    data = 0

An enumeration.

data = <BoolOffsets.data: 0>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class TBool:
408class TBool:
409    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: bool) -> Tuple[bool, Offset, Size]:
410        offset, real_size = shared_memory.malloc(ObjectType.tbool, 8)
411        write_uint64(shared_memory.base_address, offset + 16 + 0, int(obj))
412        return obj, offset, real_size
413    
414    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> bool:
415        if ObjectType.tbool != read_uint64(shared_memory.base_address, offset + 0):
416            raise WrongObjectTypeError
417
418        return bool(read_uint64(shared_memory.base_address, offset + 16 + 0))
419    
420    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
421        if ObjectType.tbool != read_uint64(shared_memory.base_address, offset + 0):
422            raise WrongObjectTypeError
423
424        shared_memory.free(offset)
def map_to_shared_memory( self, shared_memory: SharedMemory, obj: bool) -> Tuple[bool, int, int]:
409    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: bool) -> Tuple[bool, Offset, Size]:
410        offset, real_size = shared_memory.malloc(ObjectType.tbool, 8)
411        write_uint64(shared_memory.base_address, offset + 16 + 0, int(obj))
412        return obj, offset, real_size
def init_from_shared_memory( self, shared_memory: SharedMemory, offset: int) -> bool:
414    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> bool:
415        if ObjectType.tbool != read_uint64(shared_memory.base_address, offset + 0):
416            raise WrongObjectTypeError
417
418        return bool(read_uint64(shared_memory.base_address, offset + 16 + 0))
def destroy( self, shared_memory: SharedMemory, offset: int):
420    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
421        if ObjectType.tbool != read_uint64(shared_memory.base_address, offset + 0):
422            raise WrongObjectTypeError
423
424        shared_memory.free(offset)
class FloatOffsets(enum.IntEnum):
431class FloatOffsets(IntEnum):
432    data = 0

Slot index of each field in the data area of a shared-memory float object; multiplied by 8 to obtain a byte offset.

data = <FloatOffsets.data: 0>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class TFloat:
435class TFloat:
436    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: float) -> Tuple[float, Offset, Size]:
437        offset, real_size = shared_memory.malloc(ObjectType.tfloat, 8)
438        write_double(shared_memory.base_address, offset + 16 + 0, obj)
439        return obj, offset, real_size
440    
441    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> float:
442        if ObjectType.tfloat != read_uint64(shared_memory.base_address, offset):
443            raise WrongObjectTypeError
444
445        return read_double(shared_memory.base_address, offset + 16 + 0)
446    
447    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
448        if ObjectType.tfloat != read_uint64(shared_memory.base_address, offset):
449            raise WrongObjectTypeError
450
451        shared_memory.free(offset)
def map_to_shared_memory( self, shared_memory: SharedMemory, obj: float) -> Tuple[float, int, int]:
436    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: float) -> Tuple[float, Offset, Size]:
437        offset, real_size = shared_memory.malloc(ObjectType.tfloat, 8)
438        write_double(shared_memory.base_address, offset + 16 + 0, obj)
439        return obj, offset, real_size
def init_from_shared_memory( self, shared_memory: SharedMemory, offset: int) -> float:
441    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> float:
442        if ObjectType.tfloat != read_uint64(shared_memory.base_address, offset):
443            raise WrongObjectTypeError
444
445        return read_double(shared_memory.base_address, offset + 16 + 0)
def destroy( self, shared_memory: SharedMemory, offset: int):
447    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
448        if ObjectType.tfloat != read_uint64(shared_memory.base_address, offset):
449            raise WrongObjectTypeError
450
451        shared_memory.free(offset)
class BytesOffsets(enum.IntEnum):
458class BytesOffsets(IntEnum):
459    data_size = 0
460    data = 1

Slot indices of the fields in a shared-memory bytes object's data area (`data_size` holds the payload length, `data` marks the start of the payload); each index is multiplied by 8 to obtain a byte offset.

data_size = <BytesOffsets.data_size: 0>
data = <BytesOffsets.data: 1>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class TBytes:
463class TBytes:
464    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: bytes) -> Tuple[bytes, Offset, Size]:
465        data_size = len(obj)
466        # offset, real_size = shared_memory.malloc(ObjectType.tbytes, 16 + 8 * data_size)
467        offset, real_size = shared_memory.malloc(ObjectType.tbytes, 16 + data_size)
468        write_uint64(shared_memory.base_address, offset + 16 + 0, data_size)
469        data_offset = offset + 16 + 8
470        shared_memory._shared_memory.buf[data_offset:data_offset + data_size] = obj
471        return obj, offset, real_size
472    
473    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> bytes:
474        if ObjectType.tbytes != read_uint64(shared_memory.base_address, offset + 0):
475            raise WrongObjectTypeError
476
477        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
478        if data_size:
479            data_offset = offset + 16 + 8
480            obj = bytes(shared_memory._shared_memory.buf[data_offset:data_offset + data_size])
481            return obj
482        else:
483            return bytes()
484    
485    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
486        if ObjectType.tbytes != read_uint64(shared_memory.base_address, offset + 0):
487            raise WrongObjectTypeError
488
489        shared_memory.free(offset)
490    
491    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
492        if ObjectType.tbytes != read_uint64(shared_memory.base_address, offset + 0):
493            raise WrongObjectTypeError
494
495        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
496        data_offset = offset + 16 + 8
497        return shared_memory._shared_memory.buf[data_offset:data_offset + data_size]
498    
499    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
500        if ObjectType.tbytes != read_uint64(shared_memory.base_address, offset + 0):
501            raise WrongObjectTypeError
502
503        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
504        data_offset = offset + 16 + 8
505        return data_offset, data_size
def map_to_shared_memory( self, shared_memory: SharedMemory, obj: bytes) -> Tuple[bytes, int, int]:
464    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: bytes) -> Tuple[bytes, Offset, Size]:
465        data_size = len(obj)
466        # offset, real_size = shared_memory.malloc(ObjectType.tbytes, 16 + 8 * data_size)
467        offset, real_size = shared_memory.malloc(ObjectType.tbytes, 16 + data_size)
468        write_uint64(shared_memory.base_address, offset + 16 + 0, data_size)
469        data_offset = offset + 16 + 8
470        shared_memory._shared_memory.buf[data_offset:data_offset + data_size] = obj
471        return obj, offset, real_size
def init_from_shared_memory( self, shared_memory: SharedMemory, offset: int) -> bytes:
473    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> bytes:
474        if ObjectType.tbytes != read_uint64(shared_memory.base_address, offset + 0):
475            raise WrongObjectTypeError
476
477        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
478        if data_size:
479            data_offset = offset + 16 + 8
480            obj = bytes(shared_memory._shared_memory.buf[data_offset:data_offset + data_size])
481            return obj
482        else:
483            return bytes()
def destroy( self, shared_memory: SharedMemory, offset: int):
485    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
486        if ObjectType.tbytes != read_uint64(shared_memory.base_address, offset + 0):
487            raise WrongObjectTypeError
488
489        shared_memory.free(offset)
def buffer( self, shared_memory: SharedMemory, offset: int) -> memoryview:
491    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
492        if ObjectType.tbytes != read_uint64(shared_memory.base_address, offset + 0):
493            raise WrongObjectTypeError
494
495        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
496        data_offset = offset + 16 + 8
497        return shared_memory._shared_memory.buf[data_offset:data_offset + data_size]
def buffer_2( self, shared_memory: SharedMemory, offset: int) -> Tuple[int, int]:
499    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
500        if ObjectType.tbytes != read_uint64(shared_memory.base_address, offset + 0):
501            raise WrongObjectTypeError
502
503        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
504        data_offset = offset + 16 + 8
505        return data_offset, data_size
class BytearrayOffsets(enum.IntEnum):
556class BytearrayOffsets(IntEnum):
557    data_size = 0
558    data = 1

Slot indices of the fields in a shared-memory bytearray object's data area (`data_size` holds the payload length, `data` marks the start of the payload); each index is multiplied by 8 to obtain a byte offset.

data_size = <BytearrayOffsets.data_size: 0>
data = <BytearrayOffsets.data: 1>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class TBytearray:
561class TBytearray:
562    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: bytearray) -> Tuple[bytearray, Offset, Size]:
563        data = bytes(obj)
564        data_size = len(data)
565        # offset, real_size = shared_memory.malloc(ObjectType.tbytearray, 16 + 8 * data_size)
566        offset, real_size = shared_memory.malloc(ObjectType.tbytearray, 16 + data_size)
567        write_uint64(shared_memory.base_address, offset + 16 + 0, data_size)
568        data_offset = offset + 16 + 8
569        shared_memory._shared_memory.buf[data_offset:data_offset + data_size] = data
570        return obj, offset, real_size
571    
572    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> bytearray:
573        if ObjectType.tbytearray != read_uint64(shared_memory.base_address, offset + 0):
574            raise WrongObjectTypeError
575
576        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
577        if data_size:
578            data_offset = offset + 16 + 8
579            data = bytes(shared_memory._shared_memory.buf[data_offset:data_offset + data_size])
580            return bytearray(data)
581        else:
582            return bytearray(bytes())
583    
584    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
585        if ObjectType.tbytearray != read_uint64(shared_memory.base_address, offset + 0):
586            raise WrongObjectTypeError
587
588        shared_memory.free(offset)
589    
590    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
591        if ObjectType.tbytearray != read_uint64(shared_memory.base_address, offset + 0):
592            raise WrongObjectTypeError
593
594        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
595        data_offset = offset + 16 + 8
596        return shared_memory._shared_memory.buf[data_offset:data_offset + data_size]
597    
598    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
599        if ObjectType.tbytearray != read_uint64(shared_memory.base_address, offset + 0):
600            raise WrongObjectTypeError
601
602        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
603        data_offset = offset + 16 + 8
604        return data_offset, data_size
def map_to_shared_memory( self, shared_memory: SharedMemory, obj: bytearray) -> Tuple[bytearray, int, int]:
562    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: bytearray) -> Tuple[bytearray, Offset, Size]:
563        data = bytes(obj)
564        data_size = len(data)
565        # offset, real_size = shared_memory.malloc(ObjectType.tbytearray, 16 + 8 * data_size)
566        offset, real_size = shared_memory.malloc(ObjectType.tbytearray, 16 + data_size)
567        write_uint64(shared_memory.base_address, offset + 16 + 0, data_size)
568        data_offset = offset + 16 + 8
569        shared_memory._shared_memory.buf[data_offset:data_offset + data_size] = data
570        return obj, offset, real_size
def init_from_shared_memory( self, shared_memory: SharedMemory, offset: int) -> bytearray:
572    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> bytearray:
573        if ObjectType.tbytearray != read_uint64(shared_memory.base_address, offset + 0):
574            raise WrongObjectTypeError
575
576        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
577        if data_size:
578            data_offset = offset + 16 + 8
579            data = bytes(shared_memory._shared_memory.buf[data_offset:data_offset + data_size])
580            return bytearray(data)
581        else:
582            return bytearray(bytes())
def destroy( self, shared_memory: SharedMemory, offset: int):
584    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
585        if ObjectType.tbytearray != read_uint64(shared_memory.base_address, offset + 0):
586            raise WrongObjectTypeError
587
588        shared_memory.free(offset)
def buffer( self, shared_memory: SharedMemory, offset: int) -> memoryview:
590    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
591        if ObjectType.tbytearray != read_uint64(shared_memory.base_address, offset + 0):
592            raise WrongObjectTypeError
593
594        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
595        data_offset = offset + 16 + 8
596        return shared_memory._shared_memory.buf[data_offset:data_offset + data_size]
def buffer_2( self, shared_memory: SharedMemory, offset: int) -> Tuple[int, int]:
598    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
599        if ObjectType.tbytearray != read_uint64(shared_memory.base_address, offset + 0):
600            raise WrongObjectTypeError
601
602        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
603        data_offset = offset + 16 + 8
604        return data_offset, data_size
class StrOffsets(enum.IntEnum):
611class StrOffsets(IntEnum):
612    data_size = 0
613    data = 1

Slot indices of the fields in a shared-memory str object's data area (`data_size` holds the UTF-8 payload length, `data` marks the start of the payload); each index is multiplied by 8 to obtain a byte offset.

data_size = <StrOffsets.data_size: 0>
data = <StrOffsets.data: 1>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class TStr:
616class TStr:
617    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: str) -> Tuple[str, Offset, Size]:
618        data = str.encode(obj)
619        data_size = len(data)
620        # offset, real_size = shared_memory.malloc(ObjectType.tstr, 16 + 8 * data_size)
621        offset, real_size = shared_memory.malloc(ObjectType.tstr, 16 + data_size)
622        write_uint64(shared_memory.base_address, offset + 16 + 0, data_size)
623        data_offset = offset + 16 + 8
624        shared_memory._shared_memory.buf[data_offset:data_offset + data_size] = data
625        return obj, offset, real_size
626    
627    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> str:
628        if ObjectType.tstr != read_uint64(shared_memory.base_address, offset + 0):
629            raise WrongObjectTypeError
630
631        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
632        if data_size:
633            data_offset = offset + 16 + 8
634            data = bytes(shared_memory._shared_memory.buf[data_offset:data_offset + data_size])
635            return data.decode()
636        else:
637            return str()
638    
639    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
640        if ObjectType.tstr != read_uint64(shared_memory.base_address, offset + 0):
641            raise WrongObjectTypeError
642
643        shared_memory.free(offset)
644    
645    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
646        if ObjectType.tstr != read_uint64(shared_memory.base_address, offset + 0):
647            raise WrongObjectTypeError
648
649        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
650        data_offset = offset + 16 + 8
651        return shared_memory._shared_memory.buf[data_offset:data_offset + data_size]
652    
653    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
654        if ObjectType.tstr != read_uint64(shared_memory.base_address, offset + 0):
655            raise WrongObjectTypeError
656
657        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
658        data_offset = offset + 16 + 8
659        return data_offset, data_size
def map_to_shared_memory( self, shared_memory: SharedMemory, obj: str) -> Tuple[str, int, int]:
617    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: str) -> Tuple[str, Offset, Size]:
618        data = str.encode(obj)
619        data_size = len(data)
620        # offset, real_size = shared_memory.malloc(ObjectType.tstr, 16 + 8 * data_size)
621        offset, real_size = shared_memory.malloc(ObjectType.tstr, 16 + data_size)
622        write_uint64(shared_memory.base_address, offset + 16 + 0, data_size)
623        data_offset = offset + 16 + 8
624        shared_memory._shared_memory.buf[data_offset:data_offset + data_size] = data
625        return obj, offset, real_size
def init_from_shared_memory( self, shared_memory: SharedMemory, offset: int) -> str:
627    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> str:
628        if ObjectType.tstr != read_uint64(shared_memory.base_address, offset + 0):
629            raise WrongObjectTypeError
630
631        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
632        if data_size:
633            data_offset = offset + 16 + 8
634            data = bytes(shared_memory._shared_memory.buf[data_offset:data_offset + data_size])
635            return data.decode()
636        else:
637            return str()
def destroy( self, shared_memory: SharedMemory, offset: int):
639    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
640        if ObjectType.tstr != read_uint64(shared_memory.base_address, offset + 0):
641            raise WrongObjectTypeError
642
643        shared_memory.free(offset)
def buffer( self, shared_memory: SharedMemory, offset: int) -> memoryview:
645    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
646        if ObjectType.tstr != read_uint64(shared_memory.base_address, offset + 0):
647            raise WrongObjectTypeError
648
649        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
650        data_offset = offset + 16 + 8
651        return shared_memory._shared_memory.buf[data_offset:data_offset + data_size]
def buffer_2( self, shared_memory: SharedMemory, offset: int) -> Tuple[int, int]:
653    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
654        if ObjectType.tstr != read_uint64(shared_memory.base_address, offset + 0):
655            raise WrongObjectTypeError
656
657        data_size = read_uint64(shared_memory.base_address, offset + 16 + 0)
658        data_offset = offset + 16 + 8
659        return data_offset, data_size
class InternalListTrueOffsets(enum.IntEnum):
667class InternalListTrueOffsets(IntEnum):
668    capacity = 0
669    size = 1

Slot indices of the header fields of a shared-memory internal list (`capacity` and `size`); each index is multiplied by 8 to obtain a byte offset within the list's system part.

Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
def malloc_tinternal_list_true( shared_memory: SharedMemory, size: int, capacity: int = None) -> Tuple[int, int]:
672def malloc_tinternal_list_true(shared_memory: 'SharedMemory', size: Size, capacity: Size = None) -> Tuple[Offset, Size]:
673    capacity = (size << 1 if size else 16) if capacity is None else capacity
674    datas_sys_part_size = 8 * len(InternalListTrueOffsets)
675    offset, real_size = shared_memory.malloc(ObjectType.tinternal_list, datas_sys_part_size + 8 * capacity)
676    data_offset = offset + datas_sys_part_size
677    write_uint64(shared_memory.base_address, data_offset + 8 * InternalListTrueOffsets.capacity, capacity)
678    write_uint64(shared_memory.base_address, data_offset + 8 * InternalListTrueOffsets.size, size)
679    return offset, real_size
def realloc_tinternal_list_true( shared_memory: SharedMemory, offset: int, desired_size: int = None, new_capacity: int = None, loop_allowed: bool = True, zero_mem: bool = True) -> Tuple[int, int]:
682def realloc_tinternal_list_true(shared_memory: 'SharedMemory', offset: Offset, desired_size: int = None, new_capacity: int = None, loop_allowed: bool = True, zero_mem: bool = True) -> Tuple[Offset, Size]:
683    datas_sys_part_size = 8 * len(InternalListTrueOffsets)
684    data_offset = offset + datas_sys_part_size
685    capacity = read_uint64(shared_memory.base_address, data_offset + 8 * InternalListTrueOffsets.capacity)
686    size = read_uint64(shared_memory.base_address, data_offset + 8 * InternalListTrueOffsets.size)
687    new_list_capacity = capacity << 1 if new_capacity is None else new_capacity
688    if new_capacity is None:
689        if desired_size is None:
690            new_list_capacity = capacity << 1 if capacity else 16
691        else:
692            new_list_capacity = desired_size << 1 if desired_size else 16
693    else:
694        new_list_capacity = new_capacity
695    
696    if new_list_capacity < size:
697        new_list_capacity = size
698    
699    new_offset, new_real_size = shared_memory.realloc(offset, datas_sys_part_size + 8 * new_list_capacity, loop_allowed, zero_mem)
700    data_offset = new_offset + datas_sys_part_size
701    write_uint64(shared_memory.base_address, data_offset + 8 * InternalListTrueOffsets.capacity, new_list_capacity)
702    return new_offset, new_real_size
class IListTrue(BaseIObject, builtins.list):
 705class IListTrue(BaseIObject, list):
 706    def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: List = None) -> None:
 707        self._shared_memory = shared_memory
 708        self._base_address = shared_memory.base_address
 709        if offset is None:
 710            offset, real_size = shared_memory.malloc(ObjectType.tlist, 8)
 711            self._offset = offset
 712            self._offset__data = offset + 8 * 2
 713            self._offset__pointer_to_internal_list = self._offset__data
 714            
 715            if obj is None:
 716                obj = list()
 717            
 718            data_len = len(obj)
 719            capacity_len = data_len << 1 if data_len else 16
 720            internal_list_offset, data_tuple_real_size = malloc_tinternal_list(shared_memory, data_len, capacity_len)
 721            self._pointer_to_internal_list = internal_list_offset
 722            for i, item in enumerate(obj):
 723                item_mapped_obj, item_offset, item_size = shared_memory.put_obj(item)
 724                write_uint64(self._base_address, self._item_offset(i), item_offset)
 725        else:
 726            self._offset = offset
 727            self._offset__data = offset + 8 * 2
 728            self._offset__pointer_to_internal_list = self._offset__data
 729    
 730    def raw_to_bytes(self, bytes_num: int) -> bytes:
 731        start_index = self._pointer_to_internal_list
 732        return self._shared_memory.read_mem(start_index, bytes_num)
 733        # return bytes(self._shared_memory._shared_memory.buf[start_index : start_index + bytes_num])
 734    
 735    @property
 736    def _obj_size(self):
 737        return read_uint64(self._base_address, self._offset + 8 * BaseObjOffsets.obj_size)
 738    
 739    @property
 740    def _pointer_to_internal_list(self):
 741        return read_uint64(self._base_address, self._offset__pointer_to_internal_list)
 742
 743    @_pointer_to_internal_list.setter
 744    def _pointer_to_internal_list(self, value: Offset):
 745        write_uint64(self._base_address, self._offset__pointer_to_internal_list, value)
 746
 747    @property
 748    def _list_len(self):
 749        return read_uint64(self._base_address, self._pointer_to_internal_list + 8 * 2 + 8 * InternalListTrueOffsets.size)
 750    
 751    @_list_len.setter
 752    def _list_len(self, value: int):
 753        write_uint64(self._base_address, self._pointer_to_internal_list + 8 * 2 + 8 * InternalListTrueOffsets.size, value)
 754
 755    @property
 756    def _list_capacity(self):
 757        return read_uint64(self._base_address, self._pointer_to_internal_list + 8 * 2 + 8 * InternalListTrueOffsets.capacity)
 758    
 759    def _item_offset(self, key: int) -> Offset:
 760        return self._pointer_to_internal_list + 8 * 2 + 8 * len(InternalListTrueOffsets) + key * 8
 761    
 762    def __len__(self) -> int:
 763        return self._list_len
 764    
 765    def get_children_offsets(self) -> List[Offset]:
 766        return [read_uint64(self._base_address, self._item_offset(i)) for i in range(self._list_len)]
 767    
 768    def __getitem__(self, key: Union[int, slice]) -> Union[Any, List]:
 769        if isinstance(key, int):
 770            if key < 0:
 771                key += len(self)
 772            if key < 0 or key >= len(self):
 773                raise IndexError
 774
 775            item_offset = read_uint64(self._base_address, self._item_offset(key))
 776            return self._shared_memory.get_obj(item_offset)
 777        elif isinstance(key, slice):
 778            if key.step is not None:
 779                raise NotImplementedError
 780            
 781            if key.start is None:
 782                start = 0
 783            elif key.start < 0:
 784                start = key.start + len(self)
 785            else:
 786                start = key.start
 787            
 788            if key.stop is None:
 789                stop = len(self)
 790            elif key.stop < 0:
 791                stop = key.stop + len(self)
 792            else:
 793                stop = key.stop
 794            
 795            if start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop:
 796                raise IndexError
 797            
 798            result_list = list()
 799            for i in range(start, stop):
 800                item_offset = read_uint64(self._base_address, self._item_offset(i))
 801                result_list.append(self._shared_memory.get_obj(item_offset))
 802            return result_list
 803        else:
 804            raise TypeError
 805    
 806    def __setitem__(self, key: Union[int, slice], value: Union[Any, Sequence]) -> Any:
 807        if isinstance(key, int):
 808            if key < 0:
 809                key += len(self)
 810            if key < 0 or key >= len(self):
 811                raise IndexError
 812
 813            item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(value)
 814            write_uint64(self._base_address, self._item_offset(key), item_offset)
 815        elif isinstance(key, slice):
 816            if key.step is not None:
 817                raise NotImplementedError
 818            
 819            if key.start is None:
 820                start = 0
 821            elif key.start < 0:
 822                start = key.start + len(self)
 823            else:
 824                start = key.start
 825            
 826            if key.stop is None:
 827                stop = len(self)
 828            elif key.stop < 0:
 829                stop = key.stop + len(self)
 830            else:
 831                stop = key.stop
 832            
 833            if start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop:
 834                raise IndexError
 835            
 836            for i in range(start, stop):
 837                item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(value[i - start])
 838                write_uint64(self._base_address, self._item_offset(i), item_offset)
 839        else:
 840            raise TypeError
 841
 842    def __delitem__(self, key: Union[int, slice]) -> None:
 843        if isinstance(key, int):
 844            if key < 0:
 845                key += len(self)
 846            if key < 0 or key >= len(self):
 847                raise IndexError
 848
 849            for i in range(key + 1, len(self)):
 850                item_offset = read_uint64(self._base_address, self._item_offset(i))
 851                self._shared_memory.free(item_offset)
 852                write_uint64(self._base_address, self._item_offset(i - 1), item_offset)
 853            
 854            self._list_len -= 1
 855        elif isinstance(key, slice):
 856            if key.step is not None:
 857                raise NotImplementedError
 858            
 859            if key.start is None:
 860                start = 0
 861            elif key.start < 0:
 862                start = key.start + len(self)
 863            else:
 864                start = key.start
 865            
 866            if key.stop is None:
 867                stop = len(self)
 868            elif key.stop < 0:
 869                stop = key.stop + len(self)
 870            else:
 871                stop = key.stop
 872            
 873            if start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop:
 874                raise IndexError
 875            
 876            for i in range(start, stop):
 877                item_offset = read_uint64(self._base_address, self._item_offset(i))
 878                self._shared_memory.free(item_offset)
 879            
 880            del_items_num = stop - start
 881            
 882            for i in range(stop, len(self)):
 883                item_offset = read_uint64(self._base_address, self._item_offset(i))
 884                write_uint64(self._base_address, self._item_offset(i - del_items_num), item_offset)
 885            
 886            self._list_len -= del_items_num
 887        else:
 888            raise TypeError
 889    
 890    def append(self, item: Any) -> None:
 891        if self._list_len > self._list_capacity:
 892            self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list)
 893
 894        item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(item)
 895        write_uint64(self._base_address, self._item_offset(self._list_len), item_offset)
 896        self._list_len += 1
 897
 898    def extend(self, items: Sequence) -> None:
 899        items_num = len(items)
 900        if self._list_len + items_num > self._list_capacity:
 901            self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list, self._list_len + items_num)
 902
 903        for i, item in enumerate(items):
 904            item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(item)
 905            write_uint64(self._base_address, self._item_offset(self._list_len + i), item_offset)
 906        
 907        self._list_len += items_num
 908    
 909    def insert(self, index: int, item: Any) -> None:
 910        if index < 0:
 911            index += len(self)
 912        if index < 0 or index > len(self):
 913            raise IndexError
 914
 915        if self._list_len > self._list_capacity:
 916            self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list)
 917
 918        for i in range(self._list_len, index, -1):
 919            item_offset = read_uint64(self._base_address, self._item_offset(i - 1))
 920            write_uint64(self._base_address, self._item_offset(i), item_offset)
 921        
 922        item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(item)
 923        write_uint64(self._base_address, self._item_offset(index), item_offset)
 924        self._list_len += 1
 925    
 926    def pop(self, index: int = -1) -> Any:
 927        if index < 0:
 928            index += len(self)
 929        if index < 0 or index >= len(self):
 930            raise IndexError
 931
 932        item_offset = read_uint64(self._base_address, self._item_offset(index))
 933        result = self._shared_memory.get_obj(item_offset)
 934        
 935        for i in range(index + 1, len(self)):
 936            item_offset = read_uint64(self._base_address, self._item_offset(i))
 937            write_uint64(self._base_address, self._item_offset(i - 1), item_offset)
 938        
 939        self._list_len -= 1
 940        return result
 941    
 942    def remove(self, item: Any) -> None:
 943        for i in range(len(self)):
 944            item_offset = read_uint64(self._base_address, self._item_offset(i))
 945            if item_offset == item._offset:
 946                for j in range(i + 1, len(self)):
 947                    item_offset = read_uint64(self._base_address, self._item_offset(j))
 948                    write_uint64(self._base_address, self._item_offset(j - 1), item_offset)
 949                
 950                self._list_len -= 1
 951                return
 952        
 953        raise ValueError
 954    
 955    def clear(self) -> None:
 956        for i in range(len(self)):
 957            item_offset = read_uint64(self._base_address, self._item_offset(i))
 958            self._shared_memory.free(item_offset)
 959        
 960        self._list_len = 0
 961    
 962    def __iter__(self):
 963        return IListIterator(self)
 964    
 965    def __reversed__(self):
 966        return IListReversedIterator(self)
 967    
 968    def __contains__(self, item: Any) -> bool:
 969        for i in range(len(self)):
 970            item_offset = read_uint64(self._base_address, self._item_offset(i))
 971            if item_offset == item._offset:
 972                return True
 973        
 974        return False
 975    
 976    def index(self, item: Any, start: int = 0, stop: int = None) -> int:
 977        if stop is None:
 978            stop = len(self)
 979        
 980        for i in range(start, stop):
 981            item_offset = read_uint64(self._base_address, self._item_offset(i))
 982            if item_offset == item._offset:
 983                return i
 984        
 985        raise ValueError
 986    
 987    def count(self, item: Any) -> int:
 988        result = 0
 989        for i in range(len(self)):
 990            item_offset = read_uint64(self._base_address, self._item_offset(i))
 991            if item_offset == item._offset:
 992                result += 1
 993        
 994        return result
 995    
 996    def reverse(self) -> None:
 997        for i in range(len(self) // 2):
 998            item_offset = read_uint64(self._base_address, self._item_offset(i))
 999            write_uint64(self._base_address, self._item_offset(i), read_uint64(self._base_address, self._item_offset(len(self) - i - 1)))
1000            write_uint64(self._base_address, self._item_offset(len(self) - i - 1), item_offset)
1001    
1002    def sort(self, key: Any = None, reverse: bool = False) -> None:
1003        raise NotImplementedError
1004    
1005    def copy(self) -> 'IList':
1006        result = IList(self._shared_memory)
1007        result.extend(self)
1008        return result
1009    
1010    def __add__(self, other: Sequence) -> 'IList':
1011        result = IList(self._shared_memory)
1012        result.extend(self)
1013        result.extend(other)
1014        return result
1015    
1016    def __iadd__(self, other: Sequence) -> 'IList':
1017        self.extend(other)
1018        return self
1019    
1020    def __mul__(self, other: int) -> 'IList':
1021        result = IList(self._shared_memory)
1022        for i in range(other):
1023            result.extend(self)
1024        
1025        return result
1026    
1027    def __imul__(self, other: int) -> 'IList':
1028        my_copy: IList = self.copy()
1029        for i in range(other):
1030            self.extend(my_copy)
1031        
1032        return self
1033    
1034    def __rmul__(self, other: int) -> 'IList':
1035        return self.__mul__(other)
1036    
1037    def __eq__(self, other: Sequence) -> bool:
1038        if len(self) != len(other):
1039            return False
1040        
1041        for i in range(len(self)):
1042            if self[i] != other[i]:
1043                return False
1044        
1045        return True
1046    
1047    def __ne__(self, other: Sequence) -> bool:
1048        return not self.__eq__(other)
1049    
1050    def __lt__(self, other: Sequence) -> bool:
1051        for i in range(len(self)):
1052            if self[i] >= other[i]:
1053                return False
1054        
1055        return True
1056    
1057    def __le__(self, other: Sequence) -> bool:
1058        for i in range(len(self)):
1059            if self[i] > other[i]:
1060                return False
1061        
1062        return True
1063    
1064    def __gt__(self, other: Sequence) -> bool:
1065        for i in range(len(self)):
1066            if self[i] <= other[i]:
1067                return False
1068        
1069        return True
1070    
1071    def __ge__(self, other: Sequence) -> bool:
1072        for i in range(len(self)):
1073            if self[i] < other[i]:
1074                return False
1075        
1076        return True
1077    
1078    def __repr__(self) -> str:
1079        return f'IList({list(self)})'
1080    
1081    def __str__(self) -> str:
1082        return f'IList({list(self)})'
1083    
1084    def __hash__(self) -> int:
1085        return hash(tuple(self))
1086    
1087    def __sizeof__(self) -> int:
1088        return read_uint64(self._base_address, self._offset + 8 * BaseObjOffsets.obj_size) + read_uint64(self._base_address, self._pointer_to_internal_list, 8 * BaseObjOffsets.obj_size)
1089    
1090    def export(self) -> list:
1091        return list(self)
1092
1093    # def __del__(self) -> None:
1094    #     self._shared_memory.free(self._pointer_to_internal_list)
1095    #     self._shared_memory.free(self._offset)

Built-in mutable sequence.

If no argument is given, the constructor creates a new empty list. The argument must be an iterable if specified.

IListTrue( shared_memory: SharedMemory, offset: int = None, obj: typing.List = None)
706    def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: List = None) -> None:
707        self._shared_memory = shared_memory
708        self._base_address = shared_memory.base_address
709        if offset is None:
710            offset, real_size = shared_memory.malloc(ObjectType.tlist, 8)
711            self._offset = offset
712            self._offset__data = offset + 8 * 2
713            self._offset__pointer_to_internal_list = self._offset__data
714            
715            if obj is None:
716                obj = list()
717            
718            data_len = len(obj)
719            capacity_len = data_len << 1 if data_len else 16
720            internal_list_offset, data_tuple_real_size = malloc_tinternal_list(shared_memory, data_len, capacity_len)
721            self._pointer_to_internal_list = internal_list_offset
722            for i, item in enumerate(obj):
723                item_mapped_obj, item_offset, item_size = shared_memory.put_obj(item)
724                write_uint64(self._base_address, self._item_offset(i), item_offset)
725        else:
726            self._offset = offset
727            self._offset__data = offset + 8 * 2
728            self._offset__pointer_to_internal_list = self._offset__data
def raw_to_bytes(self, bytes_num: int) -> bytes:
730    def raw_to_bytes(self, bytes_num: int) -> bytes:
731        start_index = self._pointer_to_internal_list
732        return self._shared_memory.read_mem(start_index, bytes_num)
733        # return bytes(self._shared_memory._shared_memory.buf[start_index : start_index + bytes_num])
def get_children_offsets(self) -> List[int]:
765    def get_children_offsets(self) -> List[Offset]:
766        return [read_uint64(self._base_address, self._item_offset(i)) for i in range(self._list_len)]
def append(self, item: typing.Any) -> None:
890    def append(self, item: Any) -> None:
891        if self._list_len > self._list_capacity:
892            self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list)
893
894        item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(item)
895        write_uint64(self._base_address, self._item_offset(self._list_len), item_offset)
896        self._list_len += 1

Append object to the end of the list.

def extend(self, items: typing.Sequence) -> None:
898    def extend(self, items: Sequence) -> None:
899        items_num = len(items)
900        if self._list_len + items_num > self._list_capacity:
901            self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list, self._list_len + items_num)
902
903        for i, item in enumerate(items):
904            item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(item)
905            write_uint64(self._base_address, self._item_offset(self._list_len + i), item_offset)
906        
907        self._list_len += items_num

Extend list by appending elements from the iterable.

def insert(self, index: int, item: typing.Any) -> None:
909    def insert(self, index: int, item: Any) -> None:
910        if index < 0:
911            index += len(self)
912        if index < 0 or index > len(self):
913            raise IndexError
914
915        if self._list_len > self._list_capacity:
916            self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list)
917
918        for i in range(self._list_len, index, -1):
919            item_offset = read_uint64(self._base_address, self._item_offset(i - 1))
920            write_uint64(self._base_address, self._item_offset(i), item_offset)
921        
922        item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(item)
923        write_uint64(self._base_address, self._item_offset(index), item_offset)
924        self._list_len += 1

Insert object before index.

def pop(self, index: int = -1) -> Any:
926    def pop(self, index: int = -1) -> Any:
927        if index < 0:
928            index += len(self)
929        if index < 0 or index >= len(self):
930            raise IndexError
931
932        item_offset = read_uint64(self._base_address, self._item_offset(index))
933        result = self._shared_memory.get_obj(item_offset)
934        
935        for i in range(index + 1, len(self)):
936            item_offset = read_uint64(self._base_address, self._item_offset(i))
937            write_uint64(self._base_address, self._item_offset(i - 1), item_offset)
938        
939        self._list_len -= 1
940        return result

Remove and return item at index (default last).

Raises IndexError if list is empty or index is out of range.

def remove(self, item: typing.Any) -> None:
942    def remove(self, item: Any) -> None:
943        for i in range(len(self)):
944            item_offset = read_uint64(self._base_address, self._item_offset(i))
945            if item_offset == item._offset:
946                for j in range(i + 1, len(self)):
947                    item_offset = read_uint64(self._base_address, self._item_offset(j))
948                    write_uint64(self._base_address, self._item_offset(j - 1), item_offset)
949                
950                self._list_len -= 1
951                return
952        
953        raise ValueError

Remove first occurrence of value.

Raises ValueError if the value is not present.

def clear(self) -> None:
955    def clear(self) -> None:
956        for i in range(len(self)):
957            item_offset = read_uint64(self._base_address, self._item_offset(i))
958            self._shared_memory.free(item_offset)
959        
960        self._list_len = 0

Remove all items from list.

def index(self, item: typing.Any, start: int = 0, stop: int = None) -> int:
976    def index(self, item: Any, start: int = 0, stop: int = None) -> int:
977        if stop is None:
978            stop = len(self)
979        
980        for i in range(start, stop):
981            item_offset = read_uint64(self._base_address, self._item_offset(i))
982            if item_offset == item._offset:
983                return i
984        
985        raise ValueError

Return first index of value.

Raises ValueError if the value is not present.

def count(self, item: typing.Any) -> int:
987    def count(self, item: Any) -> int:
988        result = 0
989        for i in range(len(self)):
990            item_offset = read_uint64(self._base_address, self._item_offset(i))
991            if item_offset == item._offset:
992                result += 1
993        
994        return result

Return number of occurrences of value.

def reverse(self) -> None:
 996    def reverse(self) -> None:
 997        for i in range(len(self) // 2):
 998            item_offset = read_uint64(self._base_address, self._item_offset(i))
 999            write_uint64(self._base_address, self._item_offset(i), read_uint64(self._base_address, self._item_offset(len(self) - i - 1)))
1000            write_uint64(self._base_address, self._item_offset(len(self) - i - 1), item_offset)

Reverse IN PLACE.

def sort(self, key: typing.Any = None, reverse: bool = False) -> None:
1002    def sort(self, key: Any = None, reverse: bool = False) -> None:
1003        raise NotImplementedError

Sort the list in ascending order and return None.

The sort is in-place (i.e. the list itself is modified) and stable (i.e. the order of two equal elements is maintained).

If a key function is given, apply it once to each list item and sort them, ascending or descending, according to their function values.

The reverse flag can be set to sort in descending order.

def copy( self) -> IList:
1005    def copy(self) -> 'IList':
1006        result = IList(self._shared_memory)
1007        result.extend(self)
1008        return result

Return a shallow copy of the list.

def export(self) -> list:
1090    def export(self) -> list:
1091        return list(self)
class InternalListOffsets(enum.IntEnum):
1102class InternalListOffsets(IntEnum):
1103    capacity = 0
1104    size = 1

An enumeration.

Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class InternalListFieldOffsets(enum.IntEnum):
1107class InternalListFieldOffsets(IntEnum):
1108    field_type = 0
1109    offset_or_data = 1

An enumeration.

Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class InternalListFieldTypes(enum.IntEnum):
1112class InternalListFieldTypes(IntEnum):
1113    tnone = 0
1114    tobj = 1
1115    tint = 2
1116    tfloat = 3
1117    tbool = 4

An enumeration.

Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
def malloc_tinternal_list( shared_memory: SharedMemory, size: int, capacity: int = None) -> Tuple[int, int]:
1120def malloc_tinternal_list(shared_memory: 'SharedMemory', size: Size, capacity: Size = None) -> Tuple[Offset, Size]:
1121    if (capacity is not None) and (size > capacity):
1122        raise ValueError
1123    
1124    capacity = (size << 1 if size else 16) if capacity is None else capacity
1125    offset, real_size = shared_memory.malloc(ObjectType.tinternal_list, 16 + 16 + capacity * 16, zero_mem=True)
1126    sys_data_offset = offset + 16
1127    write_uint64(shared_memory.base_address, sys_data_offset + 0, capacity)
1128    write_uint64(shared_memory.base_address, sys_data_offset + 8, size)
1129    return offset, real_size
def realloc_tinternal_list( shared_memory: SharedMemory, offset: int, desired_size: int = None, new_capacity: int = None, loop_allowed: bool = True, zero_mem: bool = True) -> Tuple[int, int]:
1132def realloc_tinternal_list(shared_memory: 'SharedMemory', offset: Offset, desired_size: int = None, new_capacity: int = None, loop_allowed: bool = True, zero_mem: bool = True) -> Tuple[Offset, Size]:
1133    if (desired_size is not None) and (new_capacity is not None) and (desired_size > new_capacity):
1134        raise ValueError
1135    
1136    sys_data_offset = offset + 16
1137    capacity = read_uint64(shared_memory.base_address, sys_data_offset + 0)
1138    size = read_uint64(shared_memory.base_address, sys_data_offset + 8)
1139    new_list_capacity = capacity << 1 if new_capacity is None else new_capacity
1140    if new_capacity is None:
1141        if desired_size is None:
1142            new_list_capacity = capacity << 1 if capacity else 16
1143        else:
1144            new_list_capacity = desired_size << 1 if desired_size else 16
1145    else:
1146        new_list_capacity = new_capacity
1147    
1148    if new_list_capacity < size:
1149        new_list_capacity = size
1150    
1151    if new_list_capacity == capacity:
1152        real_size = read_uint64(shared_memory.base_address, offset + 8)
1153        return offset, real_size
1154
1155    new_offset, new_real_size = shared_memory.realloc(
1156            offset,
1157            16 + new_list_capacity * 16,
1158            loop_allowed,
1159            zero_mem
1160        )
1161    new_sys_data_offset = new_offset + 16
1162    write_uint64(shared_memory.base_address, new_sys_data_offset + 0, new_list_capacity)
1163    return new_offset, new_real_size
def destroy_tinternal_list( shared_memory: SharedMemory, offset: int) -> None:
1166def destroy_tinternal_list(shared_memory: 'SharedMemory', offset: Offset) -> None:
1167    shared_memory.free(offset)
def uint64_to_bytes(int_data: int) -> bytes:
1170def uint64_to_bytes(int_data: int) -> bytes:
1171    """
1172    For a 64 bit unsigned int in little endian
1173    :param int_data:
1174    :return: bytes(); len == 8
1175    """
1176    from struct import pack
1177    result = pack('<B', int_data)
1178    return result

For a 64-bit unsigned int in little endian. :param int_data: :return: bytes(). NOTE(review): the quoted implementation packs `'<B'` (a single byte), so as written the result has len == 1, not 8 — the docstrings/bodies of `uint64_to_bytes` and `uint8_to_bytes` appear to be swapped; verify against the source module.

def uint8_to_bytes(int_data: int) -> bytes:
1181def uint8_to_bytes(int_data: int) -> bytes:
1182    """
1183    For a 64 bit unsigned int in little endian
1184    :param int_data:
1185    :return: bytes(); len == 8
1186    """
1187    from struct import pack
1188    result = pack('<Q', int_data)
1189    return result

For a 64-bit unsigned int in little endian. :param int_data: :return: bytes(); len == 8. NOTE(review): the function is named `uint8_to_bytes`, yet the quoted implementation packs `'<Q'` (8 bytes / 64-bit) — name and body appear swapped with `uint64_to_bytes`; verify against the source module.

class ListOffsets(enum.IntEnum):
1196class ListOffsets(IntEnum):
1197    internal_list_offset = 0

An enumeration.

internal_list_offset = <ListOffsets.internal_list_offset: 0>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class IList(BaseIObject, builtins.list):
1200class IList(BaseIObject, list):
1201    __slots__ = ('_shared_memory', '_base_address', '_offset', '_offset__data', '_offset__pointer_to_internal_list')
1202
1203    def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: List = None) -> None:
1204        self._shared_memory = shared_memory
1205        self._base_address = shared_memory.base_address
1206        if offset is None:
1207            offset, real_size = shared_memory.malloc(ObjectType.tlist, 8)
1208            try:
1209                self._offset = offset
1210                self._offset__data = offset + 16
1211                self._offset__pointer_to_internal_list = self._offset__data + 0
1212                
1213                if obj is None:
1214                    obj = list()
1215                
1216                data_len = len(obj)
1217                internal_list_offset, data_tuple_real_size = malloc_tinternal_list(shared_memory, data_len)
1218                self._pointer_to_internal_list = internal_list_offset
1219                for i, item in enumerate(obj):
1220                    # print(self.get_children_offsets())
1221                    # # print(self.raw_to_list(slice(0, None)))
1222                    # print(self.raw_to_bytes(200))
1223                    self._write_item(i, item)
1224                    # print(self.get_children_offsets())
1225                    # # print(self.raw_to_list(slice(0, None)))
1226                    # print(self.raw_to_bytes(200))
1227                
1228                # print(self.get_children_offsets())
1229                # # print(self.raw_to_list(slice(0, None)))
1230                # print(self.raw_to_bytes(200))
1231                # print('=======================')
1232            except:
1233                self._free_mem()
1234                raise
1235        else:
1236            self._offset = offset
1237            self._offset__data = offset + 16
1238            self._offset__pointer_to_internal_list = self._offset__data + 0
1239    
1240    def raw_to_list(self, key) -> List[bytes]:
1241        if isinstance(key, int):
1242            if key < 0:
1243                key += len(self)
1244            if key < 0 or key >= len(self):
1245                raise IndexError
1246
1247            item_offset = self._read_item_offset_or_data(key)
1248            return [uint64_to_bytes(item_offset)]
1249        elif isinstance(key, slice):
1250            if key.step is not None:
1251                raise NotImplementedError
1252            
1253            if key.start is None:
1254                start = 0
1255            elif key.start < 0:
1256                start = key.start + len(self)
1257            else:
1258                start = key.start
1259            
1260            if key.stop is None:
1261                stop = len(self)
1262            elif key.stop < 0:
1263                stop = key.stop + len(self)
1264            else:
1265                stop = key.stop
1266            
1267            if start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop:
1268                raise IndexError
1269            
1270            result_list = list()
1271            for i in range(start, stop):
1272                item_offset = self._read_item_offset_or_data(i)
1273                result_list.append(uint64_to_bytes(item_offset))
1274            
1275            return result_list
1276    
1277    def raw_to_bytes(self, bytes_num: int) -> bytes:
1278        start_index = self._pointer_to_internal_list
1279        return self._shared_memory.read_mem(start_index, bytes_num)
1280        # return bytes(self._shared_memory._shared_memory.buf[start_index : start_index + bytes_num])
1281
1282    @property
1283    def _obj_size(self):
1284        return read_uint64(self._base_address, self._offset + 8)
1285    
1286    @property
1287    def _pointer_to_internal_list(self):
1288        return read_uint64(self._base_address, self._offset__pointer_to_internal_list)
1289
1290    @_pointer_to_internal_list.setter
1291    def _pointer_to_internal_list(self, value: Offset):
1292        write_uint64(self._base_address, self._offset__pointer_to_internal_list, value)
1293
1294    @property
1295    def _list_len(self):
1296        return read_uint64(self._base_address, self._pointer_to_internal_list + 16 + 8)
1297    
1298    @_list_len.setter
1299    def _list_len(self, value: int):
1300        write_uint64(self._base_address, self._pointer_to_internal_list + 16 + 8, value)
1301
1302    @property
1303    def _list_capacity(self):
1304        return read_uint64(self._base_address, self._pointer_to_internal_list + 16 + 0)
1305    
1306    def _item_offset(self, key: int) -> Offset:
1307        return self._pointer_to_internal_list + 16 + 16 + key * 16
1308    
1309    def _item_type_offset(self, key: int) -> Offset:
1310        # from os import getpid
1311        result = self._pointer_to_internal_list + 16 + 16 + key * 16 + 0
1312        # add_0 = 16
1313        # add_1 = 16
1314        # add_2 = key * 16
1315        # add_3 = 0
1316        # print(f'PID: {getpid()}. [{add_0},{add_1},{add_2},{add_3}],{add_0 + add_1 + add_2 + add_3},{self._pointer_to_internal_list}: item_type_offset: {key}:{result}')
1317        return result
1318
1319    def _item_value_offset(self, key: int) -> Offset:
1320        # from os import getpid
1321        result = self._pointer_to_internal_list + 16 + 16 + key * 16 + 8
1322        # print(f'PID: {getpid()}. {16 + 16 + key * 16 + 8},{self._pointer_to_internal_list}: item_value_offset: {key}:{result}')
1323        return result
1324
1325    def _read_item_type(self, key: int) -> int:
1326        return read_uint64(self._base_address, self._item_type_offset(key))
1327    
1328    def _write_item_type(self, key: int, item_type: int) -> None:
1329        write_uint64(self._base_address, self._item_type_offset(key), item_type)
1330    
1331    def _read_item_offset_or_data(self, key: int) -> Union[Offset, int]:
1332        return read_uint64(self._base_address, self._item_value_offset(key))
1333
1334    def _write_item_offset_or_data(self, key: int, offset_or_data: Union[Offset, int]) -> None:
1335        write_uint64(self._base_address, self._item_value_offset(key), offset_or_data)
1336    
1337    # def _determine_obj_type(self, obj: Any) -> int:
1338    #     if isinstance(obj, int):
1339    #         return 1
1340    #     elif isinstance(obj, float):
1341    #         return 2
1342    #     elif isinstance(obj, bool):
1343    #         return 3
1344    #     else:
1345    #         return 0
1346    
1347    def _determine_obj_type(self, obj: Any) -> int:
1348        if type(obj) is int:
1349            return 2
1350        elif type(obj) is float:
1351            return 3
1352        elif type(obj) is bool:
1353            return 4
1354        elif obj is None:
1355            return 0
1356        else:
1357            return 1
1358    
1359    def _determine_obj_offset(self, obj: Any) -> Optional[Offset]:
1360        if isinstance(obj, BaseIObject):
1361            return obj._offset
1362        else:
1363            return None
1364    
1365    def _compare_item_to_obj_fast(self, key: int, obj: Any, obj_type: int, obj_offset) -> bool:
1366        result: bool = False
1367        item_type = self._read_item_type(key)
1368        if item_type == obj_type:
1369            if item_type == 1:
1370                if obj_offset is None:
1371                    if self._read_item_value(key, item_type) == obj:
1372                        result = True
1373                else:
1374                    if self._read_item_offset_or_data(key) == obj_offset:
1375                        result = True
1376            elif item_type == 2:
1377                if self._read_item_offset_or_data(key) == obj:
1378                    result = True
1379            elif item_type == 3:
1380                if self._read_item_offset_or_data(key) == obj:
1381                    result = True
1382            elif item_type == 4:
1383                if self._read_item_offset_or_data(key) == obj:
1384                    result = True
1385            elif item_type == 0:
1386                result = obj is None
1387            else:
1388                raise ValueError
1389
1390        return result
1391    
1392    def _compare_item_to_obj(self, key: int, obj: Any) -> bool:
1393        obj_type = self._determine_obj_type(obj)
1394        obj_offset = self._determine_obj_offset(obj)
1395        return self._compare_item_to_obj_fast(key, obj, obj_type, obj_offset)
1396
1397    def _read_item_value(self, key: int, item_type: int) -> Any:
1398        if item_type == 1:
1399            item_offset = read_uint64(self._base_address, self._item_value_offset(key))
1400            return self._shared_memory.get_obj(item_offset)
1401        elif item_type == 2:
1402            return read_int64(self._base_address, self._item_value_offset(key))
1403        elif item_type == 3:
1404            return read_double(self._base_address, self._item_value_offset(key))
1405        elif item_type == 4:
1406            return bool(read_uint64(self._base_address, self._item_value_offset(key)))
1407        elif item_type == 0:
1408            return None
1409        else:
1410            raise ValueError
1411    
1412    def _write_item_value(self, key: int, item_type: int, value: Any) -> None:
1413        if item_type == 1:
1414            item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(value)
1415            write_uint64(self._base_address, self._item_value_offset(key), item_offset)
1416        elif item_type == 2:
1417            write_int64(self._base_address, self._item_value_offset(key), value)
1418        elif item_type == 3:
1419            write_double(self._base_address, self._item_value_offset(key), value)
1420        elif item_type == 4:
1421            write_uint64(self._base_address, self._item_value_offset(key), int(value))
1422        elif item_type == 0:
1423            pass
1424        else:
1425            raise ValueError
1426    
1427    def _free_item_value(self, key: int, item_type: int) -> None:
1428        if item_type == 1:
1429            item_offset = read_uint64(self._base_address, self._item_value_offset(key))
1430            # self._shared_memory.free(item_offset)
1431            self._shared_memory.destroy_obj(item_offset)
1432        elif item_type == 2:
1433            pass
1434        elif item_type == 3:
1435            pass
1436        elif item_type == 4:
1437            pass
1438        elif item_type == 0:
1439            pass
1440        else:
1441            raise ValueError
1442
1443        self._write_item_type(key, 0)
1444    
1445    def _read_item_type_and_value(self, key: int) -> Tuple[int, Any]:
1446        item_type = self._read_item_type(key)
1447        return item_type, self._read_item_value(key, item_type)
1448    
1449    def _write_item_value_and_get_type(self, key: int, value: Any) -> int:
1450        if isinstance(value, int):
1451            write_uint64(self._base_address, self._item_value_offset(key), value)
1452            return 2
1453        elif isinstance(value, float):
1454            write_double(self._base_address, self._item_value_offset(key), value)
1455            return 3
1456        elif isinstance(value, bool):
1457            write_uint64(self._base_address, self._item_value_offset(key), int(value))
1458            return 4
1459        elif value is None:
1460            return 0
1461        else:
1462            item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(value)
1463            write_uint64(self._base_address, self._item_value_offset(key), item_offset)
1464            return 1
1465    
1466    def _free_item_value_and_get_type(self, key: int) -> int:
1467        item_type = self._read_item_type(key)
1468        self._free_item_value(key, item_type)
1469        return item_type
1470    
1471    def _read_item(self, key: int) -> Any:
1472        item_type = self._read_item_type(key)
1473        return self._read_item_value(key, item_type)
1474    
1475    def _write_item(self, key: int, value: Any) -> None:
1476        item_type = self._write_item_value_and_get_type(key, value)
1477        self._write_item_type(key, item_type)
1478    
1479    def _free_item(self, key: int) -> None:
1480        item_type = self._read_item_type(key)
1481        self._free_item_value(key, item_type)
1482    
1483    def _copy_item(self, src_key: int, dst_key: int) -> None:
1484        self._write_item_type(dst_key, self._read_item_type(src_key))
1485        self._write_item_offset_or_data(dst_key, self._read_item_offset_or_data(src_key))
1486    
1487    def copy_item(self, src_key: int, dst_key: int) -> None:
1488        return self._copy_item(src_key, dst_key)
1489    
1490    def _move_item(self, src_key: int, dst_key: int) -> None:
1491        self._write_item_type(dst_key, self._read_item_type(src_key))
1492        self._write_item_type(src_key, 0)
1493        self._write_item_offset_or_data(dst_key, self._read_item_offset_or_data(src_key))
1494    
1495    def move_item(self, src_key: int, dst_key: int) -> None:
1496        return self._move_item(src_key, dst_key)
1497    
1498    def copy_item_to_list(self, src_key: int, other: 'IList', dst_key: int) -> None:
1499        other._write_item_type(dst_key, self._read_item_type(src_key))
1500        other._write_item_offset_or_data(dst_key, self._read_item_offset_or_data(src_key))
1501    
1502    def move_item_to_list(self, src_key: int, other: 'IList', dst_key: int) -> None:
1503        other._write_item_type(dst_key, self._read_item_type(src_key))
1504        self._write_item_type(src_key, 0)
1505        other._write_item_offset_or_data(dst_key, self._read_item_offset_or_data(src_key))
1506    
1507    def _swap_items(self, key1: int, key2: int) -> None:
1508        item_type1 = self._read_item_type(key1)
1509        item_offset_or_data1 = self._read_item_offset_or_data(key1)
1510        self._write_item_type(key1, self._read_item_type(key2))
1511        self._write_item_type(key2, item_type1)
1512        self._write_item_offset_or_data(key1, self._read_item_offset_or_data(key2))
1513        self._write_item_offset_or_data(key2, item_offset_or_data1)
1514    
1515    def swap_items(self, key1: int, key2: int) -> None:
1516        return self._swap_items(key1, key2)
1517
    def __len__(self) -> int:
        # The length lives in shared memory; ``_list_len`` is presumably a
        # property reading it — TODO confirm against the class header.
        return self._list_len
1520    
1521    def get_children_data_or_offsets(self) -> List[Offset]:
1522        return [self._read_item_offset_or_data(i) for i in range(self._list_len)]
1523    
    def get_children_offsets(self):
        # Alias kept for API compatibility; see get_children_data_or_offsets.
        return self.get_children_data_or_offsets()
1526
1527    def _getitem_as_offset(self, key: int) -> Tuple[int, Offset]:
1528            return list__get_item_as_offset(key, self._base_address, self._offset__pointer_to_internal_list)
1529
1530    def __getitem__(self, key: Union[int, slice]) -> Union[Any, List]:
1531        if isinstance(key, int):
1532            base_address = self._base_address
1533            offset__pointer_to_internal_list = self._offset__pointer_to_internal_list
1534            pointer_to_internal_list = read_uint64(base_address, offset__pointer_to_internal_list)
1535            self_len = read_uint64(base_address, pointer_to_internal_list + 24)
1536            if key < 0 or key >= self_len:
1537                raise IndexError
1538
1539            return list__get_item(key, self._base_address, self._offset__pointer_to_internal_list, self._shared_memory.get_obj)
1540
1541            # base_address = self._base_address
1542            # offset__pointer_to_internal_list = self._offset__pointer_to_internal_list
1543            # pointer_to_internal_list = read_uint64(base_address, offset__pointer_to_internal_list)
1544            # self_len = read_uint64(base_address, pointer_to_internal_list + 24)
1545
1546            # if key < 0:
1547            #     key += self_len
1548            
1549            # if key < 0 or key >= self_len:
1550            #     raise IndexError
1551
1552            # item_type_offset = pointer_to_internal_list + 32 + key * 16
1553            # item_value_offset = pointer_to_internal_list + 40 + key * 16
1554            # item_type = read_uint64(base_address, item_type_offset)
1555            # if item_type == 1:
1556            #     return read_int64(base_address, item_value_offset)
1557            # elif item_type == 2:
1558            #     return read_double(base_address, item_value_offset)
1559            # elif item_type == 3:
1560            #     return bool(read_uint64(base_address, item_value_offset))
1561            # elif item_type == 0:
1562            #     item_offset = read_uint64(base_address, item_value_offset)
1563            #     return self._shared_memory.get_obj(item_offset)
1564            # else:
1565            #     raise ValueError
1566
1567            # # return self._read_item(key)
1568        elif isinstance(key, slice):
1569            if key.step is not None:
1570                raise NotImplementedError
1571            
1572            if key.start is None:
1573                start = 0
1574            elif key.start < 0:
1575                start = key.start + len(self)
1576            else:
1577                start = key.start
1578            
1579            if key.stop is None:
1580                stop = len(self)
1581            elif key.stop < 0:
1582                stop = key.stop + len(self)
1583            else:
1584                stop = key.stop
1585            
1586            if start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop:
1587                raise IndexError
1588            
1589            result_list = list()
1590            # performance improvement instead of using self._read_item(i)
1591            base_address = self._base_address
1592            offset__pointer_to_internal_list = self._offset__pointer_to_internal_list
1593            pointer_to_internal_list = read_uint64(base_address, offset__pointer_to_internal_list)
1594
1595            # item_type_offset = pointer_to_internal_list + 32 + i * 16
1596            item_type_offset = pointer_to_internal_list + 16 + 16 + i * 16 + 0
1597
1598            # item_value_offset = pointer_to_internal_list + 40 + i * 16
1599            item_value_offset = pointer_to_internal_list + 16 + 16 + i * 16 + 8
1600
1601            for i in range(start, stop):
1602                # result_list.append(self._read_item(i))
1603
1604                # performance improvement instead of using self._read_item(i)
1605                item_type = read_uint64(base_address, item_type_offset)
1606                if item_type == 2:
1607                    result_list.append(read_int64(base_address, item_value_offset))
1608                elif item_type == 3:
1609                    result_list.append(read_double(base_address, item_value_offset))
1610                elif item_type == 4:
1611                    result_list.append(bool(read_uint64(base_address, item_value_offset)))
1612                elif item_type == 0:
1613                    result_list.append(None)
1614                elif item_type == 1:
1615                    item_offset = read_uint64(base_address, item_value_offset)
1616                    result_list.append(self._shared_memory.get_obj(item_offset))
1617                else:
1618                    raise ValueError
1619            
1620            return result_list
1621        else:
1622            raise TypeError
1623
1624    def _setitem_as_offset(self, key: int, value_type_and_offset: Tuple[int, Offset], need_to_free_item: bool = True) -> Any:
1625        value_item_type, value_item_offset = value_type_and_offset
1626        list__set_item_as_offset(key, value_item_type, value_item_offset, self._base_address, self._offset__pointer_to_internal_list, need_to_free_item, self._shared_memory.destroy_obj)
1627    
1628    def __setitem__(self, key: Union[int, slice], value: Union[Any, Sequence], need_to_free_item: bool = True) -> Any:
1629        if isinstance(key, int):
1630            # print(f'{key=}, {value=}, {need_to_free_item=}')
1631            # internal_list_data_offset = self._pointer_to_internal_list + 16 + 16 + key * 16 + 0
1632            # internal_list_data_size = self._list_len * 16
1633            # self._shared_memory.print_mem(internal_list_data_offset, internal_list_data_size, 'internal_list before list__set_item')
1634            
1635            base_address = self._base_address
1636            offset__pointer_to_internal_list = self._offset__pointer_to_internal_list
1637            pointer_to_internal_list = read_uint64(base_address, offset__pointer_to_internal_list)
1638            self_len = read_uint64(base_address, pointer_to_internal_list + 24)
1639            if key < 0 or key >= self_len:
1640                raise IndexError
1641
1642            list__set_item(key, value, self._base_address, self._offset__pointer_to_internal_list, need_to_free_item, self._shared_memory.destroy_obj, self._shared_memory.put_obj)
1643
1644            # base_address = self._base_address
1645            # offset__pointer_to_internal_list = self._offset__pointer_to_internal_list
1646            # pointer_to_internal_list = read_uint64(base_address, offset__pointer_to_internal_list)
1647            # self_len = read_uint64(base_address, pointer_to_internal_list + 24)
1648
1649            # if key < 0:
1650            #     key += self_len
1651            
1652            # if key < 0 or key >= self_len:
1653            #     raise IndexError
1654            
1655            # item_type_offset = pointer_to_internal_list + 32 + key * 16
1656            # item_value_offset = pointer_to_internal_list + 40 + key * 16
1657            # if isinstance(value, int):
1658            #     write_int64(base_address, item_value_offset, value)
1659            #     item_type = 1
1660            # elif isinstance(value, float):
1661            #     write_double(base_address, item_value_offset, value)
1662            #     item_type = 2
1663            # elif isinstance(value, bool):
1664            #     write_uint64(base_address, item_value_offset, int(value))
1665            #     item_type = 3
1666            # else:
1667            #     item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(value)
1668            #     write_uint64(base_address, item_value_offset, item_offset)
1669            #     item_type = 0
1670            
1671            # write_uint64(base_address, item_type_offset, item_type)
1672
1673            # # self._write_item(key, value)
1674        elif isinstance(key, slice):
1675            if key.step is not None:
1676                raise NotImplementedError
1677            
1678            if key.start is None:
1679                start = 0
1680            elif key.start < 0:
1681                start = key.start + len(self)
1682            else:
1683                start = key.start
1684            
1685            if key.stop is None:
1686                stop = len(self)
1687            elif key.stop < 0:
1688                stop = key.stop + len(self)
1689            else:
1690                stop = key.stop
1691            
1692            if start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop:
1693                raise IndexError
1694            
1695            if need_to_free_item:
1696                for i in range(start, stop):
1697                    self._free_item(i)
1698            
1699            # performance improvement instead of using self._write_item(i, item)
1700            base_address = self._base_address
1701            offset__pointer_to_internal_list = self._offset__pointer_to_internal_list
1702            pointer_to_internal_list = read_uint64(base_address, offset__pointer_to_internal_list)
1703
1704            # item_type_offset = pointer_to_internal_list + 32 + i * 16
1705            item_type_offset = pointer_to_internal_list + 16 + 16 + i * 16 + 0
1706
1707            # item_value_offset = pointer_to_internal_list + 40 + i * 16
1708            item_value_offset = pointer_to_internal_list + 16 + 16 + i * 16 + 8
1709
1710            for i in range(start, stop):
1711                item = value[i - start]
1712                # self._write_item(i, item)
1713
1714                # performance improvement instead of using self._write_item(i, item)
1715                if isinstance(item, int):
1716                    write_int64(base_address, item_value_offset, item)
1717                    item_type = 2
1718                elif isinstance(item, float):
1719                    write_double(base_address, item_value_offset, item)
1720                    item_type = 3
1721                elif isinstance(item, bool):
1722                    write_uint64(base_address, item_value_offset, int(item))
1723                    item_type = 4
1724                elif item is None:
1725                    item_type = 0
1726                else:
1727                    item_mapped_obj, item_offset, item_size = self._shared_memory.put_obj(item)
1728                    write_uint64(base_address, item_value_offset, item_offset)
1729                    item_type = 1
1730                
1731                write_uint64(base_address, item_type_offset, item_type)
1732        else:
1733            raise TypeError
1734
1735    def __delitem__(self, key: Union[int, slice], need_to_free_item: bool = True) -> None:
1736        if isinstance(key, int):
1737            if key < 0:
1738                key += len(self)
1739            if key < 0 or key >= len(self):
1740                raise IndexError
1741
1742            if need_to_free_item:
1743                self._free_item(key)
1744
1745            for i in range(key + 1, len(self)):
1746                self._move_item(i, i - 1)
1747            
1748            self._list_len -= 1
1749        elif isinstance(key, slice):
1750            if key.step is not None:
1751                raise NotImplementedError
1752            
1753            if key.start is None:
1754                start = 0
1755            elif key.start < 0:
1756                start = key.start + len(self)
1757            else:
1758                start = key.start
1759            
1760            if key.stop is None:
1761                stop = len(self)
1762            elif key.stop < 0:
1763                stop = key.stop + len(self)
1764            else:
1765                stop = key.stop
1766            
1767            if start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop:
1768                raise IndexError
1769            
1770            if need_to_free_item:
1771                for i in range(start, stop):
1772                    self._free_item(i)
1773            
1774            del_items_num = stop - start
1775            
1776            for i in range(stop, len(self)):
1777                self._move_item(i, i - del_items_num)
1778            
1779            self._list_len -= del_items_num
1780        else:
1781            raise TypeError
1782    
1783    def append(self, item: Any) -> None:
1784        if self._list_len > self._list_capacity:
1785            self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list)
1786
1787        self._list_len += 1
1788        self.__setitem__(self._list_len - 1, item, need_to_free_item=False)
1789    
1790    def append_as_offset(self, value_type_and_offset: Tuple[int, Offset]) -> None:
1791        if self._list_len > self._list_capacity:
1792            self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list)
1793
1794        self._list_len += 1
1795        self._setitem_as_offset(self._list_len - 1, value_type_and_offset, need_to_free_item=False)
1796    
    def getitem_as_offset(self, key: int) -> Tuple[int, Offset]:
        # Public wrapper around _getitem_as_offset.
        return self._getitem_as_offset(key)
1799    
    def setitem_as_offset(self, key: int, value_type_and_offset: Tuple[int, Offset], need_to_free_item=True) -> None:
        # Public wrapper around _setitem_as_offset.
        self._setitem_as_offset(key, value_type_and_offset, need_to_free_item)
1802
1803    def extend(self, items: Sequence) -> None:
1804        items_num = len(items)
1805        if (self._list_len + items_num) > self._list_capacity:
1806            self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list, self._list_len + items_num)
1807
1808        original_list_len = self._list_len
1809        self._list_len += items_num
1810        for i, item in enumerate(items):
1811            self.__setitem__(original_list_len + i, item, need_to_free_item=False)
1812    
1813    def extend_with(self, items_num: int, value = None) -> None:
1814        if (self._list_len + items_num) > self._list_capacity:
1815            self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list, self._list_len + items_num)
1816
1817        original_list_len = self._list_len
1818        self._list_len += items_num
1819        for i in range(items_num):
1820            self.__setitem__(original_list_len + i, value, need_to_free_item=False)
1821
    def set_capacity(self, capacity: int) -> Union[int, None]:
        """Grow the internal cell array to hold at least ``capacity`` items.

        Returns the reallocated size reported by ``realloc_tinternal_list``,
        or None when the current capacity is already sufficient (the early
        ``return`` below — hence the widened return annotation).
        """
        if capacity <= self._list_capacity:
            return
        
        self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list, capacity)
        return result_size
1828    
1829    def insert(self, index: int, item: Any) -> None:
1830        if index < 0:
1831            index += len(self)
1832        if index < 0 or index > len(self):
1833            raise IndexError
1834
1835        if self._list_len > self._list_capacity:
1836            # self._shared_memory.print_mem(self._pointer_to_internal_list, 200, 'before realloc. {}')
1837            # self.print_internal_list('before realloc. {}')
1838            self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list)
1839            # self._shared_memory.print_mem(self._pointer_to_internal_list, 200, 'after realloc. {}')
1840            # self.print_internal_list('after realloc. {}')
1841
1842        # self.print_internal_list('before inserting {}')
1843        self._list_len += 1
1844        # self.print_internal_list('before inserting but after +1 {}')
1845        for i in range(self._list_len - 1, index, -1):
1846            self._move_item(i - 1, i)
1847            # self._shared_memory.print_mem(self._pointer_to_internal_list, 200, f'after self._move_item({i - 1, i}). {{}}')
1848            # self.print_internal_list(f'after self._move_item({i - 1, i}). {{}}')
1849        
1850        self.__setitem__(index, item, need_to_free_item=False)
1851        # self._shared_memory.print_mem(self._pointer_to_internal_list, 200, 'after inserting. {}')
1852        # self.print_internal_list('after inserting. {}')
1853    
1854    def print_internal_list(self, text: str = None, additional_cells: int = 0):
1855        internal_list = self._shared_memory.read_mem(self._pointer_to_internal_list, 16 + 16 + self._list_len * 16 + additional_cells * 16)
1856        print('--- internal list -------------')
1857        if text:
1858            print(text.format(self._pointer_to_internal_list))
1859            print('------')
1860
1861        index = 0
1862        print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + 8])
1863        index += 8
1864        print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + 8])
1865        index += 8
1866        print('---')
1867        print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + 8])
1868        index += 8
1869        print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + 8])
1870        index += 8
1871        print('---')
1872        for i in range(self._list_len):
1873            print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + 8 * 2])
1874            index += 8 * 2
1875        
1876        if additional_cells:
1877            print('------')
1878            for i in range(additional_cells):
1879                print(f'{index},{self._pointer_to_internal_list + index}:', internal_list[index:index + 8])
1880                index += 8 * 2
1881        print('-------------------------------')
1882        print()
1883
1884    def pop(self, index: int = -1) -> Any:
1885        if index < 0:
1886            index += len(self)
1887        if index < 0 or index >= len(self):
1888            raise IndexError
1889
1890        result = self.__getitem__(index)
1891        
1892        for i in range(index + 1, len(self)):
1893            self._move_item(i, i - 1)
1894        
1895        self._list_len -= 1
1896        return result
1897    
1898    def remove(self, obj: Any) -> None:
1899        obj_type = self._determine_obj_type(obj)
1900        obj_offset = self._determine_obj_offset(obj)
1901        found_in_index = None
1902        for i in range(len(self)):
1903            if self._compare_item_to_obj_fast(i, obj, obj_type, obj_offset):
1904                found_in_index = i
1905                break
1906        
1907        if found_in_index is None:
1908            raise ValueError
1909        else:
1910            self.__delitem__(found_in_index)
1911    
1912    def clear(self, need_to_free_item: bool = True) -> None:
1913        if need_to_free_item:
1914            for i in range(len(self)):
1915                self._free_item(i)
1916        
1917        self._list_len = 0
1918    
    def __iter__(self):
        # Forward iteration is delegated to a dedicated iterator object.
        return IListIterator(self)
1921    
    def __reversed__(self):
        # Reverse iteration is delegated to a dedicated iterator object.
        return IListReversedIterator(self)
1924    
1925    def __contains__(self, obj: Any) -> bool:
1926        obj_type = self._determine_obj_type(obj)
1927        obj_offset = self._determine_obj_offset(obj)
1928        found_in_index = None
1929        for i in range(len(self)):
1930            if self._compare_item_to_obj_fast(i, obj, obj_type, obj_offset):
1931                found_in_index = i
1932                break
1933        
1934        if found_in_index is None:
1935            return False
1936        else:
1937            return True
1938    
1939    def index(self, obj: Any, start: int = 0, stop: int = None) -> int:
1940        if stop is None:
1941            stop = len(self)
1942
1943        obj_type = self._determine_obj_type(obj)
1944        obj_offset = self._determine_obj_offset(obj)
1945        found_in_index = None
1946        for i in range(start, stop):
1947            if self._compare_item_to_obj_fast(i, obj, obj_type, obj_offset):
1948                found_in_index = i
1949                break
1950
1951        if found_in_index is None:
1952            raise ValueError
1953        else:
1954            return found_in_index
1955    
1956    def count(self, obj: Any) -> int:
1957        obj_type = self._determine_obj_type(obj)
1958        obj_offset = self._determine_obj_offset(obj)
1959        result = 0
1960        for i in range(len(self)):
1961            if self._compare_item_to_obj_fast(i, obj, obj_type, obj_offset):
1962                result += 1
1963
1964        return result
1965    
1966    def reverse(self) -> None:
1967        my_len = len(self)
1968        for i in range(my_len // 2):
1969            self._swap_items(i, my_len - i - 1)
1970    
    def sort(self, key: Any = None, reverse: bool = False) -> None:
        # In-place sorting of shared-memory items is not supported yet; the
        # signature mirrors list.sort for API compatibility.
        raise NotImplementedError
1973    
1974    def copy(self) -> 'IList':
1975        result = IList(self._shared_memory)
1976        result.extend(self)
1977        return result
1978    
1979    def __add__(self, other: Sequence) -> 'IList':
1980        result = IList(self._shared_memory)
1981        result.extend(self)
1982        result.extend(other)
1983        return result
1984    
    def __iadd__(self, other: Sequence) -> 'IList':
        # In-place concatenation: delegate to extend and return self, as the
        # augmented-assignment protocol requires.
        self.extend(other)
        return self
1988    
1989    def __mul__(self, other: int) -> 'IList':
1990        result = IList(self._shared_memory)
1991        for i in range(other):
1992            result.extend(self)
1993        
1994        return result
1995    
1996    def __imul__(self, other: int) -> 'IList':
1997        my_copy: IList = self.copy()
1998        for i in range(other):
1999            self.extend(my_copy)
2000        
2001        return self
2002    
    def __rmul__(self, other: int) -> 'IList':
        # Repetition is commutative: n * lst == lst * n.
        return self.__mul__(other)
2005    
2006    def __eq__(self, other: Sequence) -> bool:
2007        if len(self) != len(other):
2008            return False
2009        
2010        for i in range(len(self)):
2011            if self[i] != other[i]:
2012                return False
2013        
2014        return True
2015    
    def __ne__(self, other: Sequence) -> bool:
        # Defined explicitly because __eq__ is overridden.
        return not self.__eq__(other)
2018    
2019    def __lt__(self, other: Sequence) -> bool:
2020        for i in range(len(self)):
2021            if self[i] >= other[i]:
2022                return False
2023        
2024        return True
2025    
2026    def __le__(self, other: Sequence) -> bool:
2027        for i in range(len(self)):
2028            if self[i] > other[i]:
2029                return False
2030        
2031        return True
2032    
2033    def __gt__(self, other: Sequence) -> bool:
2034        for i in range(len(self)):
2035            if self[i] <= other[i]:
2036                return False
2037        
2038        return True
2039    
2040    def __ge__(self, other: Sequence) -> bool:
2041        for i in range(len(self)):
2042            if self[i] < other[i]:
2043                return False
2044        
2045        return True
2046    
2047    def __repr__(self) -> str:
2048        return f'IList({list(self)})'
2049    
2050    def __str__(self) -> str:
2051        return f'IList({list(self)})'
2052    
    def __hash__(self) -> int:
        # NOTE(review): hashing a mutable sequence by content means the hash
        # changes whenever the list mutates — confirm callers only hash lists
        # that are effectively immutable while used as dict/set keys.
        return hash(tuple(self))
2055    
2056    def __sizeof__(self) -> int:
2057        return 16 + read_uint64(self._base_address, self._offset + 8) + 16 + read_uint64(self._base_address, self._pointer_to_internal_list, 8)
2058    
    def export(self) -> list:
        # Materialize the shared-memory contents as an ordinary Python list.
        return list(self)
2061
2062    # def __del__(self) -> None:
2063    #     self._shared_memory.free(self._pointer_to_internal_list)
2064    #     self._shared_memory.free(self._offset)
2065
    def _free_mem(self):
        # Release both shared-memory allocations backing this list: first the
        # internal cell array (after destroying the heap objects its cells
        # reference), then the handle block. Safe to call repeatedly — guarded
        # by ``_offset`` being reset to None at the end.
        if self._offset is not None:
            if self._pointer_to_internal_list is not None:
                self.clear()
                destroy_tinternal_list(self._shared_memory, self._pointer_to_internal_list)
                # Reset to 0 rather than None: the attribute appears to be
                # backed by a uint64 word in shared memory — TODO confirm.
                self._pointer_to_internal_list = 0
            
            self._shared_memory.free(self._offset)
            self._offset = None

Built-in mutable sequence.

If no argument is given, the constructor creates a new empty list. The argument must be an iterable if specified.

IList(shared_memory: SharedMemory, offset: int = None, obj: typing.List = None)
    def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: List = None) -> None:
        """Create a list handle inside ``shared_memory``.

        When ``offset`` is None a fresh allocation is made and (optionally)
        populated from ``obj``; otherwise the handle attaches to a list
        already stored at ``offset``.
        """
        self._shared_memory = shared_memory
        self._base_address = shared_memory.base_address
        if offset is None:
            offset, real_size = shared_memory.malloc(ObjectType.tlist, 8)
            try:
                self._offset = offset
                # Payload begins after the 16-byte allocation header; the first
                # (and only) payload word points at the internal cell array.
                self._offset__data = offset + 16
                self._offset__pointer_to_internal_list = self._offset__data + 0
                
                if obj is None:
                    obj = list()
                
                data_len = len(obj)
                internal_list_offset, data_tuple_real_size = malloc_tinternal_list(shared_memory, data_len)
                self._pointer_to_internal_list = internal_list_offset
                for i, item in enumerate(obj):
                    self._write_item(i, item)
            except:
                # Roll back the partially built allocation before re-raising.
                self._free_mem()
                raise
        else:
            # Attach to an existing list: only recompute the derived offsets.
            self._offset = offset
            self._offset__data = offset + 16
            self._offset__pointer_to_internal_list = self._offset__data + 0
def raw_to_list(self, key) -> List[bytes]:
    def raw_to_list(self, key) -> List[bytes]:
        """Debug helper: return the raw 8-byte payload word of the addressed
        slot(s), each packed as ``bytes`` (a heap offset for boxed items,
        inline data otherwise).

        ``key`` may be an int or a step-less slice; any other key type falls
        through and implicitly returns None.
        """
        if isinstance(key, int):
            if key < 0:
                key += len(self)
            if key < 0 or key >= len(self):
                raise IndexError

            item_offset = self._read_item_offset_or_data(key)
            return [uint64_to_bytes(item_offset)]
        elif isinstance(key, slice):
            if key.step is not None:
                raise NotImplementedError
            
            if key.start is None:
                start = 0
            elif key.start < 0:
                start = key.start + len(self)
            else:
                start = key.start
            
            if key.stop is None:
                stop = len(self)
            elif key.stop < 0:
                stop = key.stop + len(self)
            else:
                stop = key.stop
            
            # NOTE: like the other slice APIs in this class, an empty slice
            # (start >= stop) raises IndexError rather than returning [].
            if start < 0 or start >= len(self) or stop < 0 or stop > len(self) or start >= stop:
                raise IndexError
            
            result_list = list()
            for i in range(start, stop):
                item_offset = self._read_item_offset_or_data(i)
                result_list.append(uint64_to_bytes(item_offset))
            
            return result_list
def raw_to_bytes(self, bytes_num: int) -> bytes:
    def raw_to_bytes(self, bytes_num: int) -> bytes:
        # Debug helper: dump the first ``bytes_num`` bytes of the internal
        # cell array as raw bytes.
        start_index = self._pointer_to_internal_list
        return self._shared_memory.read_mem(start_index, bytes_num)
def copy_item(self, src_key: int, dst_key: int) -> None:
1487    def copy_item(self, src_key: int, dst_key: int) -> None:
1488        return self._copy_item(src_key, dst_key)
def move_item(self, src_key: int, dst_key: int) -> None:
1495    def move_item(self, src_key: int, dst_key: int) -> None:
1496        return self._move_item(src_key, dst_key)
def copy_item_to_list( self, src_key: int, other: IList, dst_key: int) -> None:
1498    def copy_item_to_list(self, src_key: int, other: 'IList', dst_key: int) -> None:
1499        other._write_item_type(dst_key, self._read_item_type(src_key))
1500        other._write_item_offset_or_data(dst_key, self._read_item_offset_or_data(src_key))
def move_item_to_list( self, src_key: int, other: IList, dst_key: int) -> None:
1502    def move_item_to_list(self, src_key: int, other: 'IList', dst_key: int) -> None:
1503        other._write_item_type(dst_key, self._read_item_type(src_key))
1504        self._write_item_type(src_key, 0)
1505        other._write_item_offset_or_data(dst_key, self._read_item_offset_or_data(src_key))
def swap_items(self, key1: int, key2: int) -> None:
1515    def swap_items(self, key1: int, key2: int) -> None:
1516        return self._swap_items(key1, key2)
def get_children_data_or_offsets(self) -> List[int]:
1521    def get_children_data_or_offsets(self) -> List[Offset]:
1522        return [self._read_item_offset_or_data(i) for i in range(self._list_len)]
def get_children_offsets(self):
1524    def get_children_offsets(self):
1525        return self.get_children_data_or_offsets()
def append(self, item: typing.Any) -> None:
1783    def append(self, item: Any) -> None:
1784        if self._list_len > self._list_capacity:
1785            self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list)
1786
1787        self._list_len += 1
1788        self.__setitem__(self._list_len - 1, item, need_to_free_item=False)

Append object to the end of the list.

def append_as_offset(self, value_type_and_offset: typing.Tuple[int, int]) -> None:
1790    def append_as_offset(self, value_type_and_offset: Tuple[int, Offset]) -> None:
1791        if self._list_len > self._list_capacity:
1792            self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list)
1793
1794        self._list_len += 1
1795        self._setitem_as_offset(self._list_len - 1, value_type_and_offset, need_to_free_item=False)
def getitem_as_offset(self, key: int) -> Tuple[int, int]:
1797    def getitem_as_offset(self, key: int) -> Tuple[int, Offset]:
1798        return self._getitem_as_offset(key)
def setitem_as_offset(self, key: int, value_type_and_offset: Tuple[int, Offset], need_to_free_item=True) -> None:
    """Store a raw (type, offset-or-data) pair at key, optionally freeing the old item."""
    self._setitem_as_offset(key, value_type_and_offset, need_to_free_item)
def extend(self, items: Sequence) -> None:
    """Extend list by appending elements from the iterable."""
    items_num = len(items)
    if (self._list_len + items_num) > self._list_capacity:
        self._pointer_to_internal_list, result_size = realloc_tinternal_list(
            self._shared_memory, self._pointer_to_internal_list, self._list_len + items_num)

    first_new_index = self._list_len
    self._list_len += items_num
    for i, item in enumerate(items):
        self.__setitem__(first_new_index + i, item, need_to_free_item=False)

def extend_with(self, items_num: int, value = None) -> None:
    """Append `items_num` copies of `value` to the list."""
    if (self._list_len + items_num) > self._list_capacity:
        self._pointer_to_internal_list, result_size = realloc_tinternal_list(
            self._shared_memory, self._pointer_to_internal_list, self._list_len + items_num)

    first_new_index = self._list_len
    self._list_len += items_num
    for i in range(items_num):
        self.__setitem__(first_new_index + i, value, need_to_free_item=False)
def set_capacity(self, capacity: int) -> int:
    """Grow the backing storage to hold at least `capacity` items.

    Returns the reallocated size. NOTE(review): returns None (despite the
    `-> int` annotation) when the current capacity is already sufficient —
    confirm whether callers rely on that.
    """
    if capacity <= self._list_capacity:
        return

    self._pointer_to_internal_list, result_size = realloc_tinternal_list(
        self._shared_memory, self._pointer_to_internal_list, capacity)
    return result_size
def insert(self, index: int, item: Any) -> None:
    """Insert object before index.

    Negative indices count from the end (as for list.insert), but an index
    that is still out of range after normalization raises IndexError
    instead of clamping.
    """
    if index < 0:
        index += len(self)
    if index < 0 or index > len(self):
        raise IndexError

    # Fix: `>=` (was `>`) so a completely full list is reallocated before
    # the extra slot is written; mirrors `(len + n) > capacity` in extend().
    # (Also removed the block of commented-out print_mem debug calls.)
    if self._list_len >= self._list_capacity:
        self._pointer_to_internal_list, result_size = realloc_tinternal_list(self._shared_memory, self._pointer_to_internal_list)

    self._list_len += 1
    # Shift the tail one slot to the right to open a gap at `index`.
    for i in range(self._list_len - 1, index, -1):
        self._move_item(i - 1, i)

    self.__setitem__(index, item, need_to_free_item=False)

def print_internal_list(self, text: str = None, additional_cells: int = 0):
    """Debug helper: dump the raw internal-list buffer to stdout."""
    base = self._pointer_to_internal_list
    internal_list = self._shared_memory.read_mem(base, 16 + 16 + self._list_len * 16 + additional_cells * 16)
    print('--- internal list -------------')
    if text:
        print(text.format(base))
        print('------')

    index = 0
    # Two 16-byte header groups, dumped 8 bytes at a time.
    for _ in range(2):
        print(f'{index},{base + index}:', internal_list[index:index + 8])
        index += 8
        print(f'{index},{base + index}:', internal_list[index:index + 8])
        index += 8
        print('---')
    # One 16-byte cell per stored item.
    for i in range(self._list_len):
        print(f'{index},{base + index}:', internal_list[index:index + 8 * 2])
        index += 8 * 2

    if additional_cells:
        print('------')
        for i in range(additional_cells):
            # NOTE(review): prints 8 bytes but advances 16 per extra cell,
            # so only half of each extra cell is shown — confirm intent.
            print(f'{index},{base + index}:', internal_list[index:index + 8])
            index += 8 * 2
    print('-------------------------------')
    print()
def pop(self, index: int = -1) -> Any:
    """Remove and return item at index (default last).

    Raises IndexError if list is empty or index is out of range.
    """
    if index < 0:
        index += len(self)
    if not (0 <= index < len(self)):
        raise IndexError

    result = self.__getitem__(index)
    # Close the gap by shifting the tail one slot to the left.
    for i in range(index + 1, len(self)):
        self._move_item(i, i - 1)

    self._list_len -= 1
    return result

def remove(self, obj: Any) -> None:
    """Remove first occurrence of value.

    Raises ValueError if the value is not present.
    """
    obj_type = self._determine_obj_type(obj)
    obj_offset = self._determine_obj_offset(obj)
    for i in range(len(self)):
        if self._compare_item_to_obj_fast(i, obj, obj_type, obj_offset):
            self.__delitem__(i)
            return

    raise ValueError

def clear(self, need_to_free_item: bool = True) -> None:
    """Remove all items from list.

    When need_to_free_item is True, each item's shared-memory block is
    released before the length is reset.
    """
    if need_to_free_item:
        for i in range(len(self)):
            self._free_item(i)

    self._list_len = 0

def index(self, obj: Any, start: int = 0, stop: int = None) -> int:
    """Return first index of value within [start, stop).

    Raises ValueError if the value is not present.
    """
    stop = len(self) if stop is None else stop
    obj_type = self._determine_obj_type(obj)
    obj_offset = self._determine_obj_offset(obj)
    for i in range(start, stop):
        if self._compare_item_to_obj_fast(i, obj, obj_type, obj_offset):
            return i

    raise ValueError

def count(self, obj: Any) -> int:
    """Return number of occurrences of value."""
    obj_type = self._determine_obj_type(obj)
    obj_offset = self._determine_obj_offset(obj)
    return sum(
        1
        for i in range(len(self))
        if self._compare_item_to_obj_fast(i, obj, obj_type, obj_offset)
    )

def reverse(self) -> None:
    """Reverse IN PLACE by swapping mirrored slot pairs."""
    total = len(self)
    for left in range(total // 2):
        self._swap_items(left, total - 1 - left)

def sort(self, key: Any = None, reverse: bool = False) -> None:
    """Sort the list in ascending order and return None.

    Not supported for shared-memory lists yet.
    """
    raise NotImplementedError

def copy(self) -> 'IList':
    """Return a shallow copy of the list (a new IList in the same shared memory)."""
    duplicate = IList(self._shared_memory)
    duplicate.extend(self)
    return duplicate

def export(self) -> list:
    """Materialize the shared-memory list as an ordinary Python list."""
    return [item for item in self]
class IListIterator:
    """Forward iterator over an IList; relies only on len() and indexing."""

    def __init__(self, ilist: 'IList') -> None:
        self._ilist = ilist
        self._index = 0

    def __iter__(self):
        return self

    def __next__(self):
        if self._index >= len(self._ilist):
            raise StopIteration
        item = self._ilist[self._index]
        self._index += 1
        return item
class IListReversedIterator:
    """Backward iterator over an IList; relies only on len() and indexing."""

    def __init__(self, ilist: 'IList') -> None:
        self._ilist = ilist
        self._index = len(ilist) - 1

    def __iter__(self):
        return self

    def __next__(self):
        if self._index < 0:
            raise StopIteration
        item = self._ilist[self._index]
        self._index -= 1
        return item
class TList:
    """Codec adapter exposing IList through the shared-memory codec interface."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: list) -> Tuple[list, Offset, Size]:
        """Wrap `obj` into a shared-memory IList; returns (mapped obj, offset, size)."""
        mapped = IList(shared_memory, obj=obj)
        return mapped, mapped._offset, mapped._obj_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Attach to an existing shared-memory list at `offset`.

        Raises WrongObjectTypeError when the header tag is not tlist.
        """
        if ObjectType.tlist != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        return IList(shared_memory, offset)

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
        """Release all memory owned by the list at `offset`.

        Raises WrongObjectTypeError when the header tag is not tlist.
        """
        if ObjectType.tlist != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        IList(shared_memory, offset)._free_mem()
class TupleOffsets(IntEnum):
    """Payload field offsets for the tuple codec; the size field sits at 0."""

    size = 0

An enumeration.

size = <TupleOffsets.size: 0>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class TupleFieldOffsets(IntEnum):
    """Per-field offsets for tuple entries; the item offset sits at 0."""

    item_offset = 0

An enumeration.

item_offset = <TupleFieldOffsets.item_offset: 0>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class TTuple:
    """Codec mapping Python tuples into and out of shared memory.

    Layout after the 16-byte object header: a uint64 item count followed by
    one uint64 child-object offset per item.
    """

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: tuple) -> Tuple[tuple, Offset, Size]:
        """Serialize `obj`; returns (obj, offset, allocated size).

        On any failure, already-created children and the header block are
        destroyed before the exception is re-raised.
        """
        offset, real_size = shared_memory.malloc(ObjectType.ttuple, 8 + len(obj) * 8)
        created_items_offsets: List[Offset] = list()
        try:
            # Fix: removed leftover debug hook that set
            # `shared_memory.offset_to_be_monitored = offset` whenever the
            # tuple happened to equal the magic literal (1, [2, 3]).
            write_uint64(shared_memory.base_address, offset + 16 + 0, len(obj))
            for i, item in enumerate(obj):
                item_mapped_obj, item_offset, item_size = shared_memory.put_obj(item)
                created_items_offsets.append(item_offset)
                write_uint64(shared_memory.base_address, offset + 16 + 8 + i * 8, item_offset)
        except:
            # Roll back the partially built structure before re-raising.
            shared_memory.free(offset)
            for item_offset in created_items_offsets:
                shared_memory.destroy_obj(item_offset)

            raise

        return obj, offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Reconstruct and return the tuple stored at `offset`.

        Raises WrongObjectTypeError when the header tag is not ttuple.
        """
        if ObjectType.ttuple != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        size = read_uint64(shared_memory.base_address, offset + 16 + 0)
        result_list = list()
        for i in range(size):
            item_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 + i * 8)
            result_list.append(shared_memory.get_obj(item_offset))

        return tuple(result_list)

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset):
        """Recursively destroy every child object, then free the tuple's own block."""
        if ObjectType.ttuple != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        size = read_uint64(shared_memory.base_address, offset + 16 + 0)
        for i in range(size):
            item_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 + i * 8)
            shared_memory.destroy_obj(item_offset)

        shared_memory.free(offset)
class DatetimeOffsets(IntEnum):
    """Payload field offsets for the datetime codec; the pickled-bytes offset sits at 0."""

    data_bytes_offset = 0

An enumeration.

data_bytes_offset = <DatetimeOffsets.data_bytes_offset: 0>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
DatetimeTypes = typing.Union[datetime.datetime, datetime.timedelta, datetime.timezone, datetime.date, datetime.time]
class TDatetime:
    """Codec storing datetime-family objects as a pickled bytes payload."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: DatetimeTypes) -> Tuple[DatetimeTypes, Offset, Size]:
        """Pickle `obj` into shared memory; returns (unpickled copy, offset, size)."""
        offset, real_size = shared_memory.malloc(ObjectType.tdatetime, 8)
        created_items_offsets: List[Offset] = list()
        try:
            data_tuple_mapped_obj, data_bytes_offset, data_tuple_size = shared_memory.put_obj(pickle_dumps(obj))
            created_items_offsets.append(data_bytes_offset)
            write_uint64(shared_memory.base_address, offset + 16 + 0, data_bytes_offset)
        except:
            # Undo the partial allocation before propagating the error.
            shared_memory.free(offset)
            for item_offset in created_items_offsets:
                shared_memory.destroy_obj(item_offset)

            raise

        return pickle_loads(data_tuple_mapped_obj), offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> DatetimeTypes:
        """Unpickle and return the datetime object stored at `offset`.

        Raises WrongObjectTypeError when the header tag is not tdatetime.
        """
        if ObjectType.tdatetime != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        data_bytes_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
        return pickle_loads(shared_memory.get_obj(data_bytes_offset))

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Destroy the pickled payload, then free the object's own block."""
        if ObjectType.tdatetime != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        data_bytes_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
        shared_memory.destroy_obj(data_bytes_offset)
        shared_memory.free(offset)
class DecimalOffsets(IntEnum):
    """Payload field offsets for the Decimal codec; the data-tuple offset sits at 0."""

    data_tuple_offset = 0

An enumeration.

data_tuple_offset = <DecimalOffsets.data_tuple_offset: 0>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class TDecimal:
    """Codec storing Decimal values via their as_tuple() representation."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: Decimal) -> Tuple[Decimal, Offset, Size]:
        """Serialize `obj`; returns (reconstructed Decimal, offset, allocated size)."""
        offset, real_size = shared_memory.malloc(ObjectType.tdecimal, 8)
        created_items_offsets: List[Offset] = list()
        try:
            data_tuple_mapped_obj, data_tuple_offset, data_tuple_size = shared_memory.put_obj(tuple(obj.as_tuple()))
            created_items_offsets.append(data_tuple_offset)
            write_uint64(shared_memory.base_address, offset + 16 + 0, data_tuple_offset)
        except:
            # Undo the partial allocation before propagating the error.
            shared_memory.free(offset)
            for item_offset in created_items_offsets:
                shared_memory.destroy_obj(item_offset)

            raise

        return Decimal(data_tuple_mapped_obj), offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> Decimal:
        """Rebuild and return the Decimal stored at `offset`.

        Raises WrongObjectTypeError when the header tag is not tdecimal.
        """
        if ObjectType.tdecimal != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        data_tuple_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
        return Decimal(shared_memory.get_obj(data_tuple_offset))

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Destroy the stored data tuple, then free the object's own block."""
        if ObjectType.tdecimal != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        data_tuple_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
        shared_memory.destroy_obj(data_tuple_offset)
        shared_memory.free(offset)
class SliceOffsets(IntEnum):
    """Payload field offsets for the slice codec; the data-tuple offset sits at 0."""

    data_tuple_offset = 0

An enumeration.

data_tuple_offset = <SliceOffsets.data_tuple_offset: 0>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class TSlice:
    """Codec storing slice objects as a (start, stop, step) tuple payload."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: slice) -> Tuple[slice, Offset, Size]:
        """Serialize `obj`; returns (reconstructed slice, offset, allocated size)."""
        offset, real_size = shared_memory.malloc(ObjectType.tslice, 8)
        created_items_offsets: List[Offset] = list()
        try:
            # Fix: was `tuple(obj.start, obj.stop, obj.step)` — tuple() takes
            # a single iterable, so that call raised TypeError on every use.
            data_tuple_mapped_obj, data_tuple_offset, data_tuple_size = shared_memory.put_obj((obj.start, obj.stop, obj.step))
            created_items_offsets.append(data_tuple_offset)
            write_uint64(shared_memory.base_address, offset + 16 + 0, data_tuple_offset)
        except:
            # Undo the partial allocation before propagating the error.
            shared_memory.free(offset)
            for item_offset in created_items_offsets:
                shared_memory.destroy_obj(item_offset)

            raise

        return slice(*data_tuple_mapped_obj), offset, real_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> slice:
        """Rebuild and return the slice stored at `offset`.

        Raises WrongObjectTypeError when the header tag is not tslice.
        """
        if ObjectType.tslice != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        data_tuple_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
        result_tuple = shared_memory.get_obj(data_tuple_offset)
        return slice(*result_tuple)

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Destroy the stored data tuple, then free the object's own block."""
        if ObjectType.tslice != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        data_tuple_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
        shared_memory.destroy_obj(data_tuple_offset)
        shared_memory.free(offset)
class ComplexOffsets(IntEnum):
    """Payload field offsets for the complex codec; the data-tuple offset sits at 0."""

    data_tuple_offset = 0

An enumeration.

data_tuple_offset = <ComplexOffsets.data_tuple_offset: 0>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class TComplex:
2329class TComplex:
2330    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: complex) -> Tuple[complex, Offset, Size]:
2331        offset, real_size = shared_memory.malloc(ObjectType.tfastset, 8)
2332        created_items_offsets: List[Offset] = list()
2333        try:
2334            data_tuple_mapped_obj, data_tuple_offset, data_tuple_size = shared_memory.put_obj(tuple(obj.real, obj.imag))
2335            created_items_offsets.append(data_tuple_offset)
2336            write_uint64(shared_memory.base_address, offset + 16 + 0, data_tuple_offset)
2337        except:
2338            shared_memory.free(offset)
2339            for item_offset in created_items_offsets:
2340                shared_memory.destroy_obj(item_offset)
2341            
2342            raise
2343
2344        return complex(real=data_tuple_mapped_obj[0], imag=data_tuple_mapped_obj[1]), offset, real_size
2345    
2346    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> complex:
2347        if ObjectType.tfastset != read_uint64(shared_memory.base_address, offset):
2348            raise WrongObjectTypeError
2349
2350        data_tuple_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
2351        result_tuple = shared_memory.get_obj(data_tuple_offset)
2352        return complex(real=result_tuple[0], imag=result_tuple[1])
2353    
2354    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
2355        if ObjectType.tfastset != read_uint64(shared_memory.base_address, offset):
2356            raise WrongObjectTypeError
2357
2358        data_tuple_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
2359        shared_memory.destroy_obj(data_tuple_offset)
2360        shared_memory.free(offset)
def map_to_shared_memory( self, shared_memory: SharedMemory, obj: complex) -> Tuple[complex, int, int]:
2330    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: complex) -> Tuple[complex, Offset, Size]:
2331        offset, real_size = shared_memory.malloc(ObjectType.tfastset, 8)
2332        created_items_offsets: List[Offset] = list()
2333        try:
2334            data_tuple_mapped_obj, data_tuple_offset, data_tuple_size = shared_memory.put_obj(tuple((obj.real, obj.imag)))
2335            created_items_offsets.append(data_tuple_offset)
2336            write_uint64(shared_memory.base_address, offset + 16 + 0, data_tuple_offset)
2337        except:
2338            shared_memory.free(offset)
2339            for item_offset in created_items_offsets:
2340                shared_memory.destroy_obj(item_offset)
2341            
2342            raise
2343
2344        return complex(real=data_tuple_mapped_obj[0], imag=data_tuple_mapped_obj[1]), offset, real_size
def init_from_shared_memory( self, shared_memory: SharedMemory, offset: int) -> complex:
2346    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> complex:
2347        if ObjectType.tfastset != read_uint64(shared_memory.base_address, offset):
2348            raise WrongObjectTypeError
2349
2350        data_tuple_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
2351        result_tuple = shared_memory.get_obj(data_tuple_offset)
2352        return complex(real=result_tuple[0], imag=result_tuple[1])
def destroy( self, shared_memory: SharedMemory, offset: int) -> None:
2354    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
2355        if ObjectType.tfastset != read_uint64(shared_memory.base_address, offset):
2356            raise WrongObjectTypeError
2357
2358        data_tuple_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
2359        shared_memory.destroy_obj(data_tuple_offset)
2360        shared_memory.free(offset)
class FastLimitedSet(builtins.set):
2367class FastLimitedSet(set):
2368    ...

set() -> new empty set object
set(iterable) -> new set object

Build an unordered collection of unique elements.

Inherited Members
builtins.set
set
add
clear
copy
discard
difference
difference_update
intersection
intersection_update
isdisjoint
issubset
issuperset
pop
remove
symmetric_difference
symmetric_difference_update
union
update
class FastSetOffsets(enum.IntEnum):
2371class FastSetOffsets(IntEnum):
2372    data_tuple_offset = 0

An enumeration.

data_tuple_offset = <FastSetOffsets.data_tuple_offset: 0>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class TFastSet:
2375class TFastSet:
2376    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: set) -> Tuple[set, Offset, Size]:
2377        offset, real_size = shared_memory.malloc(ObjectType.tfastset, 8)
2378        created_items_offsets: List[Offset] = list()
2379        try:
2380            data_tuple_mapped_obj, data_tuple_offset, data_tuple_size = shared_memory.put_obj(tuple(obj))
2381            created_items_offsets.append(data_tuple_offset)
2382            write_uint64(shared_memory.base_address, offset + 16 + 0, data_tuple_offset)
2383        except:
2384            shared_memory.free(offset)
2385            for item_offset in created_items_offsets:
2386                shared_memory.destroy_obj(item_offset)
2387            
2388            raise
2389
2390        return set(data_tuple_mapped_obj), offset, real_size
2391    
2392    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> set:
2393        if ObjectType.tfastset != read_uint64(shared_memory.base_address, offset):
2394            raise WrongObjectTypeError
2395
2396        data_tuple_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
2397        result_tuple = shared_memory.get_obj(data_tuple_offset)
2398        return set(result_tuple)
2399    
2400    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
2401        if ObjectType.tfastset != read_uint64(shared_memory.base_address, offset):
2402            raise WrongObjectTypeError
2403
2404        data_tuple_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
2405        shared_memory.destroy_obj(data_tuple_offset)
2406        shared_memory.free(offset)
def map_to_shared_memory( self, shared_memory: SharedMemory, obj: set) -> Tuple[set, int, int]:
2376    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: set) -> Tuple[set, Offset, Size]:
2377        offset, real_size = shared_memory.malloc(ObjectType.tfastset, 8)
2378        created_items_offsets: List[Offset] = list()
2379        try:
2380            data_tuple_mapped_obj, data_tuple_offset, data_tuple_size = shared_memory.put_obj(tuple(obj))
2381            created_items_offsets.append(data_tuple_offset)
2382            write_uint64(shared_memory.base_address, offset + 16 + 0, data_tuple_offset)
2383        except:
2384            shared_memory.free(offset)
2385            for item_offset in created_items_offsets:
2386                shared_memory.destroy_obj(item_offset)
2387            
2388            raise
2389
2390        return set(data_tuple_mapped_obj), offset, real_size
def init_from_shared_memory( self, shared_memory: SharedMemory, offset: int) -> set:
2392    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> set:
2393        if ObjectType.tfastset != read_uint64(shared_memory.base_address, offset):
2394            raise WrongObjectTypeError
2395
2396        data_tuple_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
2397        result_tuple = shared_memory.get_obj(data_tuple_offset)
2398        return set(result_tuple)
def destroy( self, shared_memory: SharedMemory, offset: int) -> None:
2400    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
2401        if ObjectType.tfastset != read_uint64(shared_memory.base_address, offset):
2402            raise WrongObjectTypeError
2403
2404        data_tuple_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
2405        shared_memory.destroy_obj(data_tuple_offset)
2406        shared_memory.free(offset)
class FastLimitedDict(builtins.dict):
2413class FastLimitedDict(dict):
2414    ...
Inherited Members
builtins.dict
get
setdefault
pop
popitem
keys
items
values
update
fromkeys
clear
copy
class FastDictOffsets(enum.IntEnum):
2417class FastDictOffsets(IntEnum):
2418    data_tuple_offset = 0

An enumeration.

data_tuple_offset = <FastDictOffsets.data_tuple_offset: 0>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class TFastDict:
2421class TFastDict:
2422    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: dict) -> Tuple[dict, Offset, Size]:
2423        offset, real_size = shared_memory.malloc(ObjectType.tfastdict, 8)
2424        created_items_offsets: List[Offset] = list()
2425        try:
2426            data_tuple_mapped_obj, data_tuple_offset, data_tuple_size = shared_memory.put_obj(tuple(obj.items()))
2427            created_items_offsets.append(data_tuple_offset)
2428            write_uint64(shared_memory.base_address, offset + 16 + 0, data_tuple_offset)
2429        except:
2430            shared_memory.free(offset)
2431            for item_offset in created_items_offsets:
2432                shared_memory.destroy_obj(item_offset)
2433            
2434            raise
2435
2436        return dict(data_tuple_mapped_obj), offset, real_size
2437    
2438    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> dict:
2439        if ObjectType.tfastdict != read_uint64(shared_memory.base_address, offset):
2440            raise WrongObjectTypeError
2441
2442        data_tuple_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
2443        result_tuple = shared_memory.get_obj(data_tuple_offset)
2444        return dict(result_tuple)
2445    
2446    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
2447        if ObjectType.tfastdict != read_uint64(shared_memory.base_address, offset):
2448            raise WrongObjectTypeError
2449
2450        data_tuple_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
2451        shared_memory.destroy_obj(data_tuple_offset)
2452        shared_memory.free(offset)
def map_to_shared_memory( self, shared_memory: SharedMemory, obj: dict) -> Tuple[dict, int, int]:
2422    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: dict) -> Tuple[dict, Offset, Size]:
2423        offset, real_size = shared_memory.malloc(ObjectType.tfastdict, 8)
2424        created_items_offsets: List[Offset] = list()
2425        try:
2426            data_tuple_mapped_obj, data_tuple_offset, data_tuple_size = shared_memory.put_obj(tuple(obj.items()))
2427            created_items_offsets.append(data_tuple_offset)
2428            write_uint64(shared_memory.base_address, offset + 16 + 0, data_tuple_offset)
2429        except:
2430            shared_memory.free(offset)
2431            for item_offset in created_items_offsets:
2432                shared_memory.destroy_obj(item_offset)
2433            
2434            raise
2435
2436        return dict(data_tuple_mapped_obj), offset, real_size
def init_from_shared_memory( self, shared_memory: SharedMemory, offset: int) -> dict:
2438    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> dict:
2439        if ObjectType.tfastdict != read_uint64(shared_memory.base_address, offset):
2440            raise WrongObjectTypeError
2441
2442        data_tuple_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
2443        result_tuple = shared_memory.get_obj(data_tuple_offset)
2444        return dict(result_tuple)
def destroy( self, shared_memory: SharedMemory, offset: int) -> None:
2446    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
2447        if ObjectType.tfastdict != read_uint64(shared_memory.base_address, offset):
2448            raise WrongObjectTypeError
2449
2450        data_tuple_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
2451        shared_memory.destroy_obj(data_tuple_offset)
2452        shared_memory.free(offset)
class SetOffsets(enum.IntEnum):
2459class SetOffsets(IntEnum):
2460    size = 0
2461    capacity = 1
2462    hashmap_offset = 2

An enumeration.

size = <SetOffsets.size: 0>
capacity = <SetOffsets.capacity: 1>
hashmap_offset = <SetOffsets.hashmap_offset: 2>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class SetHashmapFieldTypes(enum.IntEnum):
2465class SetHashmapFieldTypes(IntEnum):
2466    tnone = 0
2467    tobj = 1
2468    tbucket = 2

An enumeration.

Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class SetHashmapItemOffsets(enum.IntEnum):
2471class SetHashmapItemOffsets(IntEnum):
2472    field_type = 0
2473    field_hash = 1
2474    obj_or_bucket = 2

An enumeration.

Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class SetBucketOffsets(enum.IntEnum):
2477class SetBucketOffsets(IntEnum):
2478    field_hash = 0
2479    obj = 1

An enumeration.

field_hash = <SetBucketOffsets.field_hash: 0>
obj = <SetBucketOffsets.obj: 1>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class ISet(BaseIObject, collections.abc.Set):
2482class ISet(BaseIObject, AbsSet):
2483    def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: AbsSet = None) -> None:
2484        self._shared_memory = shared_memory
2485        self._base_address = shared_memory.base_address
2486        self._obj_size = None
2487        self._offset: Offset = None
2488        self._offset__data: Offset = None
2489        self._offset__size_offset: Offset = None
2490        self._offset__capacity_offset: Offset = None
2491        self._offset__hashmap_offset: Offset = None
2492        self._load_factor = 0.75
2493        self._hash_bits: int = None
2494        self._capacity: int = None
2495        self._size: int = None
2496        self.hashmap: IList = None
2497        self.hashmap_offset: Offset = None
2498        self.buckets: Dict[int, IList] = dict()
2499
2500        if offset is None:
2501            if obj is None:
2502                # obj = frozenset(set())
2503                data_len = 16
2504            else:
2505                data_len = len(obj)
2506
2507            self._size: int = data_len
2508            self.hash_bits = 1
2509            self.capacity = int(ceil(data_len / self._load_factor))
2510
2511            offset, self._obj_size = shared_memory.malloc(ObjectType.tset, 24)
2512            try:
2513                self._offset = offset
2514                offset__data = offset + 16
2515                self._offset__data = offset__data
2516                self._offset__size_offset: Offset = offset__data + 0
2517                self._offset__capacity_offset: Offset = offset__data + 8
2518                self._offset__hashmap_offset = offset__data + 16
2519
2520                write_uint64(shared_memory.base_address, self._offset__size_offset, self._size)
2521                write_uint64(shared_memory.base_address, self._offset__capacity_offset, self.capacity)
2522
2523                self.hashmap, hashmap_offset, _ = shared_memory.put_obj(list())
2524                self.hashmap = cast(IList, self.hashmap)
2525                self.hashmap_offset = hashmap_offset
2526                write_uint64(shared_memory.base_address, self._offset__hashmap_offset, hashmap_offset)
2527                hashmap_capacity = self.capacity * 3
2528                self.hashmap.set_capacity(hashmap_capacity)
2529                self.hashmap.extend_with(hashmap_capacity, 0)
2530                hash_bits: int = self.hash_bits
2531                if obj is not None:
2532                    for item in obj:
2533                        item_hash = hash(item)
2534                        item_info_index: int = mask_least_significant_bits(item_hash, hash_bits) * 3
2535                        field_type_index = item_info_index + 0
2536                        item_hash_index = item_info_index + 1
2537                        item_bucket_index = item_info_index + 2
2538                        field_type = self.hashmap[field_type_index]
2539                        if 0 == field_type:
2540                            self.hashmap[field_type_index] = 1
2541                            self.hashmap[item_hash_index] = item_hash
2542                            self.hashmap[item_bucket_index] = item
2543                        elif 1 == field_type:
2544                            bucket, bucket_offset, _ = shared_memory.put_obj(list())
2545                            bucket = cast(IList, bucket)
2546                            bucket.set_capacity(2)
2547                            bucket.extend_with(2, 0)
2548                            self.buckets[item_info_index] = bucket
2549                            self.hashmap.move_item_to_list(item_hash_index, bucket, 0)
2550                            self.hashmap.move_item_to_list(item_bucket_index, bucket, 1)
2551                            self.hashmap[field_type_index] = 2
2552                            self.hashmap[item_bucket_index] = bucket_offset
2553                            bucket.append(item_hash)
2554                            bucket.append(item)
2555                        elif 2 == field_type:
2556                            bucket = self.buckets[item_info_index]
2557                            bucket.append(item_hash)
2558                            bucket.append(item)
2559                        else:
2560                            raise ValueError(f'Unknown SetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
2561            except:
2562                self._free_mem()
2563                raise
2564        else:
2565            self._offset = offset
2566            offset__data = offset + 16
2567            self._offset__data = offset__data
2568            self._offset__size_offset: Offset = offset__data + 0
2569            self._offset__capacity_offset: Offset = offset__data + 8
2570            self._offset__hashmap_offset = offset__data + 16
2571
2572            self._size = read_uint64(shared_memory.base_address, self._offset__size_offset)
2573            self.hash_bits = 1
2574            self.capacity = read_uint64(shared_memory.base_address, self._offset__capacity_offset)
2575            hashmap_offset = read_uint64(shared_memory.base_address, self._offset__hashmap_offset)
2576            
2577            self.hashmap_offset = hashmap_offset
2578            self.hashmap = IList(shared_memory, hashmap_offset)
2579            item_info_index: int = 0
2580            # for item_info_index in range(self.capacity):
2581            #     field_type_index = item_info_index * 3 + 0
2582            #     item_hash_index = item_info_index * 3 + 1
2583            #     item_bucket_index = item_info_index * 3 + 2
2584            #     field_type = self.hashmap[field_type_index]
2585            #     if 0 == field_type:
2586            #         continue
2587            #     elif 1 == field_type:
2588            #         continue
2589            #     elif 2 == field_type:
2590            #         bucket_offset = self.hashmap[item_bucket_index]
2591            #         self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
2592            #     else:
2593            #         raise ValueError(f'Unknown SetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
2594
2595            for item_info_index in range(0, self.capacity * 3, 3):
2596                field_type_index = item_info_index + 0
2597                item_hash_index = item_info_index + 1
2598                item_bucket_index = item_info_index + 2
2599                field_type = self.hashmap[field_type_index]
2600                if 0 == field_type:
2601                    continue
2602                elif 1 == field_type:
2603                    continue
2604                elif 2 == field_type:
2605                    bucket_offset = self.hashmap[item_bucket_index]
2606                    self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
2607                else:
2608                    raise ValueError(f'Unknown SetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
2609
2610    def __len__(self):
2611        return self._size
2612    
2613    def __iter__(self):
2614        return ISetIterator(self)
2615    
2616    def __contains__(self, obj: Any) -> bool:
2617        item_hash = hash(obj)
2618        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * 3
2619        field_type_index = item_info_index + 0
2620        item_hash_index = item_info_index + 1
2621        item_bucket_index = item_info_index + 2
2622        field_type = self.hashmap[field_type_index]
2623        if 0 == field_type:
2624            return False
2625        elif 1 == field_type:
2626            return (item_hash == self.hashmap[item_hash_index]) and (obj == self.hashmap[item_bucket_index])
2627        elif 2 == field_type:
2628            bucket = self.buckets[item_info_index]
2629            # for sub_item_info_index in range(len(bucket)):
2630            for sub_item_info_index in range(0, len(bucket) * 2, 2):
2631                sub_item_hash_index = sub_item_info_index + 0
2632                sub_item_obj_index = sub_item_info_index + 1
2633                if (item_hash == bucket[sub_item_hash_index]) and (obj == bucket[sub_item_obj_index]):
2634                    return True
2635            
2636            return False
2637        else:
2638            raise ValueError(f'Unknown SetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
2639
2640    def __hash__(self):
2641        return self._hash()
2642
2643    @property
2644    def hash_bits(self) -> int:
2645        return self._hash_bits
2646
2647    @hash_bits.setter
2648    def hash_bits(self, value: int) -> None:
2649        self._hash_bits = value
2650        self._capacity = 2 ** value
2651    
2652    @property
2653    def capacity(self) -> int:
2654        return self._capacity
2655
2656    @capacity.setter
2657    def capacity(self, value: int) -> None:
2658        if value <= self._capacity:
2659            return
2660        
2661        if value <= 2:
2662            self.hash_bits = 1
2663        else:
2664            self.hash_bits = int(ceil(log2(value)))
2665    
2666    def __str__(self) -> str:
2667        return set(self).__str__()
2668
2669    def __repr__(self) -> str:
2670        return set(self).__repr__()
2671
2672    def _free_mem(self):
2673        if self._offset is not None:
2674            for _, bucket in self.buckets.items():
2675                self._shared_memory.destroy_obj(bucket._offset)
2676            
2677            self.buckets.clear()
2678            if self.hashmap_offset is not None:
2679                self._shared_memory.destroy_obj(self.hashmap_offset)
2680                self.hashmap_offset = None
2681
2682            self._shared_memory.free(self._offset)
2683            self._offset = None

A set is a finite, iterable container.

This class provides concrete generic implementations of all methods except for __contains__, __iter__ and __len__.

To override the comparisons (presumably for speed, as the semantics are fixed), redefine __le__ and __ge__, then the other operations will automatically follow suit.

ISet( shared_memory: SharedMemory, offset: int = None, obj: collections.abc.Set = None)
2483    def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: AbsSet = None) -> None:
2484        self._shared_memory = shared_memory
2485        self._base_address = shared_memory.base_address
2486        self._obj_size = None
2487        self._offset: Offset = None
2488        self._offset__data: Offset = None
2489        self._offset__size_offset: Offset = None
2490        self._offset__capacity_offset: Offset = None
2491        self._offset__hashmap_offset: Offset = None
2492        self._load_factor = 0.75
2493        self._hash_bits: int = None
2494        self._capacity: int = None
2495        self._size: int = None
2496        self.hashmap: IList = None
2497        self.hashmap_offset: Offset = None
2498        self.buckets: Dict[int, IList] = dict()
2499
2500        if offset is None:
2501            if obj is None:
2502                # obj = frozenset(set())
2503                data_len = 16
2504            else:
2505                data_len = len(obj)
2506
2507            self._size: int = data_len
2508            self.hash_bits = 1
2509            self.capacity = int(ceil(data_len / self._load_factor))
2510
2511            offset, self._obj_size = shared_memory.malloc(ObjectType.tset, 24)
2512            try:
2513                self._offset = offset
2514                offset__data = offset + 16
2515                self._offset__data = offset__data
2516                self._offset__size_offset: Offset = offset__data + 0
2517                self._offset__capacity_offset: Offset = offset__data + 8
2518                self._offset__hashmap_offset = offset__data + 16
2519
2520                write_uint64(shared_memory.base_address, self._offset__size_offset, self._size)
2521                write_uint64(shared_memory.base_address, self._offset__capacity_offset, self.capacity)
2522
2523                self.hashmap, hashmap_offset, _ = shared_memory.put_obj(list())
2524                self.hashmap = cast(IList, self.hashmap)
2525                self.hashmap_offset = hashmap_offset
2526                write_uint64(shared_memory.base_address, self._offset__hashmap_offset, hashmap_offset)
2527                hashmap_capacity = self.capacity * 3
2528                self.hashmap.set_capacity(hashmap_capacity)
2529                self.hashmap.extend_with(hashmap_capacity, 0)
2530                hash_bits: int = self.hash_bits
2531                if obj is not None:
2532                    for item in obj:
2533                        item_hash = hash(item)
2534                        item_info_index: int = mask_least_significant_bits(item_hash, hash_bits) * 3
2535                        field_type_index = item_info_index + 0
2536                        item_hash_index = item_info_index + 1
2537                        item_bucket_index = item_info_index + 2
2538                        field_type = self.hashmap[field_type_index]
2539                        if 0 == field_type:
2540                            self.hashmap[field_type_index] = 1
2541                            self.hashmap[item_hash_index] = item_hash
2542                            self.hashmap[item_bucket_index] = item
2543                        elif 1 == field_type:
2544                            bucket, bucket_offset, _ = shared_memory.put_obj(list())
2545                            bucket = cast(IList, bucket)
2546                            bucket.set_capacity(2)
2547                            bucket.extend_with(2, 0)
2548                            self.buckets[item_info_index] = bucket
2549                            self.hashmap.move_item_to_list(item_hash_index, bucket, 0)
2550                            self.hashmap.move_item_to_list(item_bucket_index, bucket, 1)
2551                            self.hashmap[field_type_index] = 2
2552                            self.hashmap[item_bucket_index] = bucket_offset
2553                            bucket.append(item_hash)
2554                            bucket.append(item)
2555                        elif 2 == field_type:
2556                            bucket = self.buckets[item_info_index]
2557                            bucket.append(item_hash)
2558                            bucket.append(item)
2559                        else:
2560                            raise ValueError(f'Unknown SetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
2561            except:
2562                self._free_mem()
2563                raise
2564        else:
2565            self._offset = offset
2566            offset__data = offset + 16
2567            self._offset__data = offset__data
2568            self._offset__size_offset: Offset = offset__data + 0
2569            self._offset__capacity_offset: Offset = offset__data + 8
2570            self._offset__hashmap_offset = offset__data + 16
2571
2572            self._size = read_uint64(shared_memory.base_address, self._offset__size_offset)
2573            self.hash_bits = 1
2574            self.capacity = read_uint64(shared_memory.base_address, self._offset__capacity_offset)
2575            hashmap_offset = read_uint64(shared_memory.base_address, self._offset__hashmap_offset)
2576            
2577            self.hashmap_offset = hashmap_offset
2578            self.hashmap = IList(shared_memory, hashmap_offset)
2579            item_info_index: int = 0
2580            # for item_info_index in range(self.capacity):
2581            #     field_type_index = item_info_index * 3 + 0
2582            #     item_hash_index = item_info_index * 3 + 1
2583            #     item_bucket_index = item_info_index * 3 + 2
2584            #     field_type = self.hashmap[field_type_index]
2585            #     if 0 == field_type:
2586            #         continue
2587            #     elif 1 == field_type:
2588            #         continue
2589            #     elif 2 == field_type:
2590            #         bucket_offset = self.hashmap[item_bucket_index]
2591            #         self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
2592            #     else:
2593            #         raise ValueError(f'Unknown SetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
2594
2595            for item_info_index in range(0, self.capacity * 3, 3):
2596                field_type_index = item_info_index + 0
2597                item_hash_index = item_info_index + 1
2598                item_bucket_index = item_info_index + 2
2599                field_type = self.hashmap[field_type_index]
2600                if 0 == field_type:
2601                    continue
2602                elif 1 == field_type:
2603                    continue
2604                elif 2 == field_type:
2605                    bucket_offset = self.hashmap[item_bucket_index]
2606                    self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
2607                else:
2608                    raise ValueError(f'Unknown SetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
hashmap: IList
hashmap_offset: int
buckets: Dict[int, IList]
hash_bits: int
2643    @property
2644    def hash_bits(self) -> int:
2645        return self._hash_bits
capacity: int
2652    @property
2653    def capacity(self) -> int:
2654        return self._capacity
Inherited Members
collections.abc.Set
isdisjoint
class ISetIterator:
2686class ISetIterator:
2687    def __init__(self, iset: ISet) -> None:
2688        self._iset = iset
2689        self._index = 0
2690        self._sub_index = 0
2691    
2692    def __next__(self):
2693        while self._index < self._iset.capacity:
2694            item_info_index: int = self._index * 3
2695            field_type_index = item_info_index + 0
2696            item_hash_index = item_info_index + 1
2697            item_bucket_index = item_info_index + 2
2698            field_type = self._iset.hashmap[field_type_index]
2699            if 0 == field_type:
2700                self._index += 1
2701                continue
2702            elif 1 == field_type:
2703                result = self._iset.hashmap[item_bucket_index]
2704                self._index += 1
2705                break
2706            elif 2 == field_type:
2707                bucket = self._iset.buckets[item_info_index]
2708                sub_item_info_index = self._sub_index
2709                sub_item_hash_index = sub_item_info_index * 2 + 0
2710                sub_item_obj_index = sub_item_info_index * 2 + 1
2711                if (sub_item_info_index * 2) >= len(bucket):
2712                    self._sub_index = 0
2713                    self._index += 1
2714                    continue
2715
2716                result = bucket[sub_item_obj_index]
2717                self._sub_index += 1
2718                break
2719            else:
2720                raise ValueError(f'Unknown SetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
2721        else:
2722            raise StopIteration
2723
2724        return result
2725    
2726    def __iter__(self):
2727        return self
ISetIterator( iset: ISet)
2687    def __init__(self, iset: ISet) -> None:
2688        self._iset = iset
2689        self._index = 0
2690        self._sub_index = 0
class TSet:
2730class TSet:
2731    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: set) -> Tuple[AbsSet, Offset, Size]:
2732        obj: ISet = ISet(shared_memory, obj=obj)
2733        return obj, obj._offset, obj._obj_size
2734    
2735    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> ISet:
2736        if ObjectType.tset != read_uint64(shared_memory.base_address, offset):
2737            raise WrongObjectTypeError
2738        
2739        return ISet(shared_memory, offset)
2740    
2741    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
2742        if ObjectType.tset != read_uint64(shared_memory.base_address, offset):
2743            raise WrongObjectTypeError
2744        
2745        obj: ISet = ISet(shared_memory, offset)
2746        obj._free_mem()
def map_to_shared_memory( self, shared_memory: SharedMemory, obj: set) -> Tuple[collections.abc.Set, int, int]:
2731    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: set) -> Tuple[AbsSet, Offset, Size]:
2732        obj: ISet = ISet(shared_memory, obj=obj)
2733        return obj, obj._offset, obj._obj_size
def init_from_shared_memory( self, shared_memory: SharedMemory, offset: int) -> ISet:
2735    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> ISet:
2736        if ObjectType.tset != read_uint64(shared_memory.base_address, offset):
2737            raise WrongObjectTypeError
2738        
2739        return ISet(shared_memory, offset)
def destroy( self, shared_memory: SharedMemory, offset: int) -> None:
2741    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
2742        if ObjectType.tset != read_uint64(shared_memory.base_address, offset):
2743            raise WrongObjectTypeError
2744        
2745        obj: ISet = ISet(shared_memory, offset)
2746        obj._free_mem()
class MutableSetOffsets(enum.IntEnum):
class MutableSetOffsets(IntEnum):
    # Field order (in uint64 slots) of the IMutableSet data area that follows
    # the 16-byte object header.
    size = 0             # number of live elements
    capacity = 1         # current hashmap capacity, in 3-slot item records
    hashmap_offset = 2   # shared-memory offset of the backing IList hashmap
    refresh_counter = 3  # bumped on structural change so stale views re-attach
                         # (see IMutableSet._check_hashmap / _refresh_hashmap)

Slot offsets (in uint64 units) of the IMutableSet header fields.

size = <MutableSetOffsets.size: 0>
capacity = <MutableSetOffsets.capacity: 1>
hashmap_offset = <MutableSetOffsets.hashmap_offset: 2>
refresh_counter = <MutableSetOffsets.refresh_counter: 3>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class MutableSetHashmapFieldTypes(enum.IntEnum):
class MutableSetHashmapFieldTypes(IntEnum):
    # Tag stored in the first slot of each 3-slot hashmap item record.
    tnone = 0    # record is empty
    tobj = 1     # object stored inline in the third slot
    tbucket = 2  # third slot holds the offset of a collision-bucket IList

Tags for the three states of a hashmap item record: empty, inline object, or collision bucket.

Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class MutableSetHashmapItemOffsets(enum.IntEnum):
class MutableSetHashmapItemOffsets(IntEnum):
    # Relative slot positions inside a 3-slot hashmap item record.
    field_type = 0     # MutableSetHashmapFieldTypes tag
    field_hash = 1     # hash() of the stored object
    obj_or_bucket = 2  # inline object (tobj) or bucket offset (tbucket)

Relative slot positions within a three-slot hashmap item record.

Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class MutableSetBucketFieldTypes(enum.IntEnum):
class MutableSetBucketFieldTypes(IntEnum):
    # Tag stored in the first slot of each 3-slot collision-bucket record.
    tnone = 0  # bucket slot is free
    tobj = 1   # bucket slot holds an object

Tags for the two states of a collision-bucket record: free or occupied.

Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class MutableSetBucketOffsets(enum.IntEnum):
class MutableSetBucketOffsets(IntEnum):
    # Relative slot positions inside a 3-slot collision-bucket record.
    field_type = 0  # MutableSetBucketFieldTypes tag
    field_hash = 1  # hash() of the stored object
    obj = 2         # the stored object itself

Relative slot positions within a three-slot collision-bucket record.

Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class IMutableSet(BaseIObject, collections.abc.MutableSet):
class IMutableSet(BaseIObject, AbsMutableSet):
    """Mutable set backed by shared memory (object type ``tmutableset``).

    Layout after the 16-byte object header at ``self._offset`` (four uint64
    fields, see ``MutableSetOffsets``):

        +0   size            -- number of live elements
        +8   capacity        -- hashmap capacity, in 3-slot item records
        +16  hashmap offset  -- shared-memory offset of the backing ``IList``
        +24  refresh counter -- bumped on structural changes

    The hashmap ``IList`` holds ``capacity * 3`` slots; each item record is
    ``(field_type, hash, obj_or_bucket)``.  ``field_type`` follows
    ``MutableSetHashmapFieldTypes``: 0 = empty, 1 = object stored inline,
    2 = third slot holds the offset of a collision bucket (another ``IList``
    of ``(field_type, hash, obj)`` triples).

    Other views of the same set detect structural changes via the refresh
    counter and re-read the hashmap (``_check_hashmap``/``_refresh_hashmap``).
    """

    def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: AbsMutableSet = None) -> None:
        """Create a new shared-memory set (``offset is None``) or attach to an
        existing one at ``offset``.  When creating, ``obj`` optionally supplies
        the initial contents.
        """
        self._shared_memory = shared_memory
        self._base_address = shared_memory.base_address
        self._obj_size = None
        self._offset: Offset = None
        self._offset__data: Offset = None
        self._offset__size_offset: Offset = None
        self._offset__capacity_offset: Offset = None
        self._offset__hashmap_offset: Offset = None
        self._offset__refresh_counter_offset: Offset = None
        self._load_factor = 0.75         # grow threshold factor for sizing
        self._load_factor_2 = 0.5625     # shrink threshold factor (0.75 ** 2)
        self._hash_bits: int = None      # invariant: _capacity == 2 ** _hash_bits
        self._capacity: int = None
        self._min_capacity: int = None
        self._size: int = None
        self.hashmap: IList = None
        self._refresh_counter: int = 0
        self.hashmap_offset: Offset = None
        # item_info_index -> attached collision-bucket IList
        self.buckets: Dict[int, IList] = dict()

        # Suppress rehash / refresh-counter tracking while half-constructed.
        self.ignore_rehash: bool = True

        if offset is None:
            # --- create a brand-new set in shared memory ---
            if obj is None:
                # obj = frozenset(set())
                data_len = 16  # default initial sizing when no contents given
            else:
                data_len = len(obj)

            self._size = 0
            self.hash_bits = 1
            # capacity setter rounds up to the next power of two (via hash_bits).
            self.capacity = int(ceil(data_len / self._load_factor))
            self._min_capacity = int(ceil(self._capacity * self._load_factor_2))

            # 16-byte header + four uint64 data fields.
            offset, self._obj_size = shared_memory.malloc(ObjectType.tmutableset, 32)
            try:
                self._offset = offset
                offset__data = offset + 16
                self._offset__data = offset__data
                self._offset__size_offset: Offset = offset__data + 0
                self._offset__capacity_offset: Offset = offset__data + 8
                self._offset__hashmap_offset = offset__data + 16
                self._offset__refresh_counter_offset = offset__data + 24

                write_uint64(shared_memory.base_address, self._offset__size_offset, self._size)
                write_uint64(shared_memory.base_address, self._offset__capacity_offset, self.capacity)
                write_uint64(shared_memory.base_address, self._offset__refresh_counter_offset, self._refresh_counter)

                # Backing hashmap: an IList of capacity * 3 zeroed slots.
                self.hashmap, hashmap_offset, _ = shared_memory.put_obj(list())
                self.hashmap = cast(IList, self.hashmap)
                self.hashmap_offset = hashmap_offset
                write_uint64(shared_memory.base_address, self._offset__hashmap_offset, hashmap_offset)
                hashmap_capacity = self.capacity * 3
                self.hashmap.set_capacity(hashmap_capacity)
                self.hashmap.extend_with(hashmap_capacity, 0)
                hash_bits: int = self.hash_bits  # NOTE(review): unused local
                if obj is None:
                    pass
                elif isinstance(obj, IMutableSet):
                    # Move raw (hash, type, offset) records between shared
                    # sets -- no Python-object round trip.
                    self._move_from(obj)
                else:
                    for item in obj:
                        self.add(item)

                self._refresh_counter = read_uint64(shared_memory.base_address, self._offset__refresh_counter_offset)

                self.ignore_rehash = False
            except:
                # Roll back the allocation on any failure, then re-raise.
                self._free_mem()
                raise
        else:
            # --- attach to an existing set at ``offset`` ---
            self._refresh_hashmap(offset)
            self.ignore_rehash = False
            # (Obsolete commented-out attach-time code removed; attaching is
            # handled entirely by _refresh_hashmap above.)

    def _refresh_hashmap(self, offset: Offset):
        """Re-read the header fields and re-attach to the hashmap and all of
        its collision buckets at ``offset``.

        Called on attach and whenever ``_check_hashmap`` detects that the
        shared refresh counter differs from our cached copy.
        """
        # Drop all cached state before re-reading from shared memory.
        self._hash_bits = None
        self._capacity = None
        self._min_capacity = None
        self._size = None
        self.hashmap = None
        self._refresh_counter = 0
        self.hashmap_offset = None
        self.buckets = dict()

        shared_memory = self._shared_memory
        self._offset = offset
        offset__data = offset + 16
        self._offset__data = offset__data
        self._offset__size_offset: Offset = offset__data + 0
        self._offset__capacity_offset: Offset = offset__data + 8
        self._offset__hashmap_offset = offset__data + 16
        self._offset__refresh_counter_offset = offset__data + 24

        self._refresh_counter = read_uint64(shared_memory.base_address, self._offset__refresh_counter_offset)
        self._size = read_uint64(shared_memory.base_address, self._offset__size_offset)
        self.hash_bits = 1
        # The capacity setter recomputes hash_bits from the stored capacity.
        self.capacity = read_uint64(shared_memory.base_address, self._offset__capacity_offset)
        hashmap_offset = read_uint64(shared_memory.base_address, self._offset__hashmap_offset)
        self._min_capacity = int(ceil(self._capacity * self._load_factor_2))

        self.hashmap_offset = hashmap_offset
        self.hashmap = IList(shared_memory, hashmap_offset)
        item_info_index: int = 0  # NOTE(review): leftover; rebound by the loop below
        # Re-attach every collision bucket referenced from the hashmap.
        for item_info_index in range(0, self.capacity * 3, 3):
            field_type_index = item_info_index + 0
            item_hash_index = item_info_index + 1
            item_bucket_index = item_info_index + 2
            field_type = self.hashmap[field_type_index]
            if 0 == field_type:
                continue
            elif 1 == field_type:
                continue
            elif 2 == field_type:
                bucket_offset = self.hashmap[item_bucket_index]
                self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
            else:
                raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    @property
    def refresh_counter(self):
        # Always read the live shared value, not the cached copy.
        return read_uint64(self._base_address, self._offset__refresh_counter_offset)

    def _increase_refresh_counter(self):
        # Publish a structural change so other views of this set re-attach.
        if not self.ignore_rehash:
            self._refresh_counter += 1
            write_uint64(self._base_address, self._offset__refresh_counter_offset, self._refresh_counter)

    def _check_hashmap(self):
        """Re-attach if another view structurally changed the set.

        Returns True when a refresh was performed, False otherwise.
        """
        if self.ignore_rehash:
            return False
        else:
            base_address = self._base_address
            refresh_counter = read_uint64(base_address, self._offset__refresh_counter_offset)
            if self._refresh_counter != refresh_counter:
                self._refresh_hashmap(self._offset)
                return True

            return False

    # (Obsolete commented-out ``hashmap`` property/setter removed; the
    # attribute is plain and refreshing is driven by _check_hashmap.)

    def _increase_size(self):
        # Persist the new size; rehash when the table leaves its load window.
        self._size += 1
        write_uint64(self._base_address, self._offset__size_offset, self._size)
        if (self._size > self._capacity) or (self._size < self._min_capacity):
            self._rehash()

    def _decrease_size(self):
        # Persist the new size; rehash when the table leaves its load window.
        self._size -= 1
        if self._size < 0:
            raise RuntimeError('Size of the set is negative')

        write_uint64(self._base_address, self._offset__size_offset, self._size)
        if (self._size > self._capacity) or (self._size < self._min_capacity):
            self._rehash()

    def _move_from(self, other: 'IMutableSet'):
        # Consume ``other`` item by item as raw (hash, type, offset) records,
        # avoiding deserialization of the stored objects.
        for value_hash, value_type, value_offset in other.iter_offset_pop():
            self.add_as_offset(value_hash, value_type, value_offset)

    def _rehash(self):
        """Rebuild the hashmap sized for the current number of elements.

        Serializes ``self`` into a brand-new shared set (sized from the
        current contents), swaps the two sets' cached state AND the uint64
        header fields, then destroys the temporary set -- which by then owns
        the old backing storage.
        """
        if self.ignore_rehash:
            return

        self._increase_refresh_counter()

        ignore_rehash = self.ignore_rehash
        self.ignore_rehash = True

        new_other, new_other_offset, new_other_size = self._shared_memory.put_obj(self)
        new_other = cast(IMutableSet, new_other)

        # Snapshot the freshly built set's state (cached and in-memory).
        other_capacity = new_other._capacity
        other_hash_bits = new_other._hash_bits
        other_min_capacity = new_other._min_capacity
        other_size = new_other._size
        other_hashmap = new_other.hashmap
        other_hashmap_offset = new_other.hashmap_offset
        other_buckets = new_other.buckets
        other_hashmap_offset_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__hashmap_offset)
        other_size_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__size_offset)
        other_capacity_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__capacity_offset)

        # Hand our (old) storage to the temporary set ...
        # (the refresh counter is intentionally not swapped -- a previously
        # present swap was commented out; ours was already bumped above)
        new_other._capacity = self._capacity
        new_other._hash_bits = self._hash_bits
        new_other._min_capacity = self._min_capacity
        new_other._size = self._size
        new_other.hashmap = self.hashmap
        new_other.hashmap_offset = self.hashmap_offset
        new_other.buckets = self.buckets
        write_uint64(new_other._shared_memory.base_address, new_other._offset__hashmap_offset, read_uint64(self._base_address, self._offset__hashmap_offset))
        write_uint64(new_other._shared_memory.base_address, new_other._offset__size_offset, read_uint64(self._base_address, self._offset__size_offset))
        write_uint64(new_other._shared_memory.base_address, new_other._offset__capacity_offset, read_uint64(self._base_address, self._offset__capacity_offset))

        # ... and adopt the new storage ourselves.
        self._capacity = other_capacity
        self._hash_bits = other_hash_bits
        self._min_capacity = other_min_capacity
        self._size = other_size
        self.hashmap = other_hashmap
        self.hashmap_offset = other_hashmap_offset
        self.buckets = other_buckets
        write_uint64(self._base_address, self._offset__hashmap_offset, other_hashmap_offset_bin)
        write_uint64(self._base_address, self._offset__size_offset, other_size_bin)
        write_uint64(self._base_address, self._offset__capacity_offset, other_capacity_bin)

        # The temporary set now owns the old storage; free it all at once.
        self._shared_memory.destroy_obj(new_other_offset)

        self.ignore_rehash = ignore_rehash

    def __len__(self):
        self._check_hashmap()
        return self._size

    def __iter__(self):
        # Yields deserialized Python objects.
        self._check_hashmap()
        return IMutableSetIterator(self)

    def iter_offset(self):
        # Yields raw (hash, type, offset) records without deserializing.
        self._check_hashmap()
        return IMutableSetIteratorAsOffset(self)

    def iter_offset_pop(self):
        # Like iter_offset, but removes each yielded item from the set.
        self._check_hashmap()
        return IMutableSetIteratorAsOffset(self, True)

    def __contains__(self, obj: Any) -> bool:
        """Membership test; mirrors the slot/bucket lookup used by ``add``."""
        self._check_hashmap()
        item_hash = hash(obj)
        # The low ``hash_bits`` bits of the hash select one 3-slot record.
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * 3
        field_type_index = item_info_index + 0
        item_hash_index = item_info_index + 1
        item_bucket_index = item_info_index + 2
        field_type = self.hashmap[field_type_index]
        if 0 == field_type:
            return False
        elif 1 == field_type:
            # Inline object: compare full hash first, then equality.
            return (item_hash == self.hashmap[item_hash_index]) and (obj == self.hashmap[item_bucket_index])
        elif 2 == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    raise KeyError
            except KeyError:
                raise
                # NOTE(review): unreachable -- the ``raise`` above prevents
                # re-attaching a missing/stale bucket from its offset; looks
                # like leftover debugging. Confirm intended behavior.
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            # Linear scan of the bucket's (type, hash, obj) triples.
            for bucket_item_index in range(0, len(bucket), 3):
                bucket_field_type = bucket[bucket_item_index + 0]
                if 0 == bucket_field_type:
                    continue

                bucket_field_hash = bucket[bucket_item_index + 1]
                bucket_obj = bucket[bucket_item_index + 2]
                if (item_hash == bucket_field_hash) and (obj == bucket_obj):
                    return True

            return False
        else:
            raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    def add(self, value):
        """Add an element.

        No-op if an equal element is already present.  On a slot collision
        the inline record is converted into a collision bucket.
        """
        self._check_hashmap()
        item = value
        item_hash = hash(item)
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * 3
        field_type_index = item_info_index + 0
        item_hash_index = item_info_index + 1
        item_bucket_index = item_info_index + 2
        field_type = self.hashmap[field_type_index]
        if 0 == field_type:
            # Empty slot: store inline.
            self.hashmap[field_type_index] = 1
            self.hashmap[item_hash_index] = item_hash
            self.hashmap[item_bucket_index] = item
            self._increase_size()
            return
        elif 1 == field_type:
            if (item_hash == self.hashmap[item_hash_index]) and (item == self.hashmap[item_bucket_index]):
                return  # already present

            # Collision with a different inline object: spill both into a
            # new bucket; structural change, so bump the refresh counter.
            self._increase_refresh_counter()
            bucket, bucket_offset, _ = self._shared_memory.put_obj(list())
            bucket = cast(IList, bucket)
            bucket.set_capacity(3)
            bucket.extend_with(3, 0)
            self.buckets[item_info_index] = bucket
            bucket[0] = 1
            # Move the existing inline record into bucket slots 1..2 without
            # deserializing it.
            self.hashmap.move_item_to_list(item_hash_index, bucket, 1)
            self.hashmap.move_item_to_list(item_bucket_index, bucket, 2)
            self.hashmap[field_type_index] = 2
            self.hashmap[item_bucket_index] = bucket_offset
            # Append the new item as a second triple.
            bucket.append(1)
            bucket.append(item_hash)
            bucket.append(item)
            self._increase_size()
            return
        elif 2 == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    raise KeyError
            except KeyError:
                raise
                # NOTE(review): unreachable (see __contains__) -- confirm.
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            # First pass: dedup check.
            bucket_len: int = len(bucket)
            for bucket_item_index in range(0, bucket_len, 3):
                bucket_field_type = bucket[bucket_item_index + 0]
                if 1 == bucket_field_type:
                    if (item_hash == bucket[bucket_item_index + 1]) and (item == bucket[bucket_item_index + 2]):
                        return

            # Second pass: reuse a freed triple, else append a new one.
            for bucket_item_index in range(0, bucket_len, 3):
                bucket_field_type = bucket[bucket_item_index + 0]
                if 0 == bucket_field_type:
                    bucket[bucket_item_index + 0] = 1
                    bucket[bucket_item_index + 1] = item_hash
                    bucket[bucket_item_index + 2] = item
                    self._increase_size()
                    return
            else:
                bucket.append(1)
                bucket.append(item_hash)
                bucket.append(item)
                self._increase_size()
                return
        else:
            raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    def add_as_offset(self, value_hash, value_type, value_offset):
        """Add an element given as a raw (hash, type, offset) record.

        Mirror of ``add`` that moves already-serialized data between shared
        structures via the ``*_as_offset`` IList APIs -- no deserialization.
        """
        self._check_hashmap()
        item = (value_type, value_offset)
        item_hash = value_hash
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * 3
        field_type_index = item_info_index + 0
        item_hash_index = item_info_index + 1
        item_bucket_index = item_info_index + 2
        field_type = self.hashmap[field_type_index]
        if 0 == field_type:
            self.hashmap[field_type_index] = 1
            self.hashmap[item_hash_index] = item_hash
            self.hashmap.setitem_as_offset(item_bucket_index, item)
            self._increase_size()
            return
        elif 1 == field_type:
            if (item_hash == self.hashmap[item_hash_index]) and (item == self.hashmap.getitem_as_offset(item_bucket_index)):
                return  # already present

            # Collision: spill the inline record into a new bucket.
            self._increase_refresh_counter()
            bucket, bucket_offset, _ = self._shared_memory.put_obj(list())
            bucket = cast(IList, bucket)
            bucket.set_capacity(3)
            bucket.extend_with(3, 0)
            self.buckets[item_info_index] = bucket
            bucket[0] = 1
            self.hashmap.move_item_to_list(item_hash_index, bucket, 1)
            self.hashmap.move_item_to_list(item_bucket_index, bucket, 2)
            self.hashmap[field_type_index] = 2
            self.hashmap[item_bucket_index] = bucket_offset
            bucket.append(1)
            bucket.append(item_hash)
            bucket.append_as_offset(item)
            self._increase_size()
            return
        elif 2 == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    raise KeyError
            except KeyError:
                raise
                # NOTE(review): unreachable (see __contains__) -- confirm.
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            # First pass: dedup check.
            bucket_len: int = len(bucket)
            for bucket_item_index in range(0, bucket_len, 3):
                bucket_field_type = bucket[bucket_item_index + 0]
                if 1 == bucket_field_type:
                    if (item_hash == bucket[bucket_item_index + 1]) and (item == bucket.getitem_as_offset(bucket_item_index + 2)):
                        return

            # Second pass: reuse a freed triple, else append a new one.
            for bucket_item_index in range(0, bucket_len, 3):
                bucket_field_type = bucket[bucket_item_index + 0]
                if 0 == bucket_field_type:
                    bucket[bucket_item_index + 0] = 1
                    bucket[bucket_item_index + 1] = item_hash
                    bucket.setitem_as_offset(bucket_item_index + 2, item)
                    self._increase_size()
                    return
            else:
                bucket.append(1)
                bucket.append(item_hash)
                bucket.append_as_offset(item)
                self._increase_size()
                return
        else:
            raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    def discard(self, value):
        """Remove an element.  Do not raise an exception if absent."""
        self._check_hashmap()
        obj = value
        item_hash = hash(obj)
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * 3
        field_type_index = item_info_index + 0
        item_hash_index = item_info_index + 1
        item_bucket_index = item_info_index + 2
        field_type = self.hashmap[field_type_index]
        if 0 == field_type:
            return
        elif 1 == field_type:
            if (item_hash == self.hashmap[item_hash_index]) and (obj == self.hashmap[item_bucket_index]):
                # Clear the inline record.
                self.hashmap[field_type_index] = 0
                self.hashmap[item_hash_index] = None
                self.hashmap[item_bucket_index] = None
                self._decrease_size()
                return
            else:
                return
        elif 2 == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    raise KeyError
            except KeyError:
                raise
                # NOTE(review): unreachable (see __contains__) -- confirm.
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            # Scan the bucket and clear the matching triple (slot is reused
            # by a later ``add``; the bucket itself is not compacted here).
            for bucket_item_index in range(0, len(bucket), 3):
                bucket_field_type = bucket[bucket_item_index + 0]
                if 0 == bucket_field_type:
                    continue

                bucket_field_hash = bucket[bucket_item_index + 1]
                bucket_obj = bucket[bucket_item_index + 2]
                if (item_hash == bucket_field_hash) and (obj == bucket_obj):
                    bucket[bucket_item_index + 0] = 0
                    bucket[bucket_item_index + 1] = None
                    bucket[bucket_item_index + 2] = None
                    self._decrease_size()
                    return
            return
        else:
            raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')

    @property
    def hash_bits(self) -> int:
        # Number of low hash bits used to pick a hashmap slot.
        return self._hash_bits

    @hash_bits.setter
    def hash_bits(self, value: int) -> None:
        # Keeps the invariant capacity == 2 ** hash_bits.
        self._hash_bits = value
        self._capacity = 2 ** value

    @property
    def capacity(self) -> int:
        return self._capacity

    @capacity.setter
    def capacity(self, value: int) -> None:
        # Grows only: requests at or below the current capacity are ignored.
        if value <= self._capacity:
            return

        if value <= 2:
            self.hash_bits = 1
        else:
            # Round up to the next power of two via hash_bits.
            self.hash_bits = int(ceil(log2(value)))

    def __str__(self) -> str:
        # Materializes the whole set into a Python set for display.
        self._check_hashmap()
        return set(self).__str__()

    def __repr__(self) -> str:
        # Materializes the whole set into a Python set for display.
        self._check_hashmap()
        return set(self).__repr__()

    def _free_mem(self):
        """Release all shared memory owned by this set: every collision
        bucket, the hashmap, then the header block itself.  Safe to call
        repeatedly and on a partially constructed instance.
        """
        if self._offset is not None:
            for _, bucket in self.buckets.items():
                self._shared_memory.destroy_obj(bucket._offset)

            self.buckets.clear()
            if self.hashmap_offset is not None:
                self._shared_memory.destroy_obj(self.hashmap_offset)
                self.hashmap_offset = None

            self._shared_memory.free(self._offset)
            self._offset = None

A mutable set is a finite, iterable container.

This class provides concrete generic implementations of all methods except for __contains__, __iter__, __len__, add(), and discard().

To override the comparisons (presumably for speed, as the semantics are fixed), all you have to do is redefine __le__ and then the other operations will automatically follow suit.

IMutableSet( shared_memory: SharedMemory, offset: int = None, obj: collections.abc.MutableSet = None)
2784    def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: AbsMutableSet = None) -> None:
2785        self._shared_memory = shared_memory
2786        self._base_address = shared_memory.base_address
2787        self._obj_size = None
2788        self._offset: Offset = None
2789        self._offset__data: Offset = None
2790        self._offset__size_offset: Offset = None
2791        self._offset__capacity_offset: Offset = None
2792        self._offset__hashmap_offset: Offset = None
2793        self._offset__refresh_counter_offset: Offset = None
2794        self._load_factor = 0.75
2795        self._load_factor_2 = 0.5625
2796        self._hash_bits: int = None
2797        self._capacity: int = None
2798        self._min_capacity: int = None
2799        self._size: int = None
2800        self.hashmap: IList = None
2801        self._refresh_counter: int = 0
2802        self.hashmap_offset: Offset = None
2803        self.buckets: Dict[int, IList] = dict()
2804
2805        self.ignore_rehash: bool = True
2806
2807        if offset is None:
2808            if obj is None:
2809                # obj = frozenset(set())
2810                data_len = 16
2811            else:
2812                data_len = len(obj)
2813
2814            self._size = 0
2815            self.hash_bits = 1
2816            self.capacity = int(ceil(data_len / self._load_factor))
2817            self._min_capacity = int(ceil(self._capacity * self._load_factor_2))
2818
2819            offset, self._obj_size = shared_memory.malloc(ObjectType.tmutableset, 32)
2820            try:
2821                self._offset = offset
2822                offset__data = offset + 16
2823                self._offset__data = offset__data
2824                self._offset__size_offset: Offset = offset__data + 0
2825                self._offset__capacity_offset: Offset = offset__data + 8
2826                self._offset__hashmap_offset = offset__data + 16
2827                self._offset__refresh_counter_offset = offset__data + 24
2828
2829                write_uint64(shared_memory.base_address, self._offset__size_offset, self._size)
2830                write_uint64(shared_memory.base_address, self._offset__capacity_offset, self.capacity)
2831                write_uint64(shared_memory.base_address, self._offset__refresh_counter_offset, self._refresh_counter)
2832
2833                self.hashmap, hashmap_offset, _ = shared_memory.put_obj(list())
2834                self.hashmap = cast(IList, self.hashmap)
2835                self.hashmap_offset = hashmap_offset
2836                write_uint64(shared_memory.base_address, self._offset__hashmap_offset, hashmap_offset)
2837                hashmap_capacity = self.capacity * 3
2838                self.hashmap.set_capacity(hashmap_capacity)
2839                self.hashmap.extend_with(hashmap_capacity, 0)
2840                hash_bits: int = self.hash_bits
2841                if obj is None:
2842                    pass
2843                elif isinstance(obj, IMutableSet):
2844                    self._move_from(obj)
2845                else:
2846                    for item in obj:
2847                        self.add(item)
2848                
2849                self._refresh_counter = read_uint64(shared_memory.base_address, self._offset__refresh_counter_offset)
2850                
2851                self.ignore_rehash = False
2852            except:
2853                self._free_mem()
2854                raise
2855        else:
2856            self._refresh_hashmap(offset)
2857            self.ignore_rehash = False
2858
2859            # self._offset = offset
2860            # offset__data = offset + 16
2861            # self._offset__data = offset__data
2862            # self._offset__size_offset: Offset = offset__data + 0
2863            # self._offset__capacity_offset: Offset = offset__data + 8
2864            # self._offset__hashmap_offset = offset__data + 16
2865
2866            # self._size = read_uint64(shared_memory.base_address, self._offset__size_offset)
2867            # self.hash_bits = 1
2868            # self.capacity = read_uint64(shared_memory.base_address, self._offset__capacity_offset)
2869            # hashmap_offset = read_uint64(shared_memory.base_address, self._offset__hashmap_offset)
2870            # self._min_capacity = int(ceil(self._capacity * self._load_factor_2))
2871            
2872            # self.hashmap_offset = hashmap_offset
2873            # self.hashmap = IList(shared_memory, hashmap_offset)
2874            # item_info_index: int = 0
2875            # # for item_info_index in range(self.capacity):
2876            # #     field_type_index = item_info_index * 3 + 0
2877            # #     item_hash_index = item_info_index * 3 + 1
2878            # #     item_bucket_index = item_info_index * 3 + 2
2879            # #     field_type = self.hashmap[field_type_index]
2880            # #     if 0 == field_type:
2881            # #         continue
2882            # #     elif 1 == field_type:
2883            # #         continue
2884            # #     elif 2 == field_type:
2885            # #         bucket_offset = self.hashmap[item_bucket_index]
2886            # #         self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
2887            # #     else:
2888            # #         raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
2889
2890            # for item_info_index in range(0, self.capacity * 3, 3):
2891            #     field_type_index = item_info_index + 0
2892            #     item_hash_index = item_info_index + 1
2893            #     item_bucket_index = item_info_index + 2
2894            #     field_type = self.hashmap[field_type_index]
2895            #     if 0 == field_type:
2896            #         continue
2897            #     elif 1 == field_type:
2898            #         continue
2899            #     elif 2 == field_type:
2900            #         bucket_offset = self.hashmap[item_bucket_index]
2901            #         self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
2902            #     else:
2903            #         raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
2904
2905            # self.ignore_rehash = False
hashmap: IList
hashmap_offset: int
buckets: Dict[int, IList]
ignore_rehash: bool
refresh_counter
2971    @property
2972    def refresh_counter(self):
2973        return read_uint64(self._base_address, self._offset__refresh_counter_offset)
def iter_offset(self):
3091    def iter_offset(self):
3092        self._check_hashmap()
3093        return IMutableSetIteratorAsOffset(self)
def iter_offset_pop(self):
3095    def iter_offset_pop(self):
3096        self._check_hashmap()
3097        return IMutableSetIteratorAsOffset(self, True)
def add(self, value):
    def add(self, value):
        """Add an element.

        The hashmap is a flat IList of 3-slot records
        ``[field_type, item_hash, payload]``: field_type 0 = empty slot,
        1 = `payload` is the item stored inline, 2 = `payload` is the
        shared-memory offset of a collision bucket (itself an IList of
        the same 3-slot records).
        """
        self._check_hashmap()
        item = value
        item_hash = hash(item)
        # Record index: low `hash_bits` bits of the hash, times 3 slots/record.
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * 3
        field_type_index = item_info_index + 0
        item_hash_index = item_info_index + 1
        item_bucket_index = item_info_index + 2
        field_type = self.hashmap[field_type_index]
        if 0 == field_type:
            # Empty slot: store the item inline.
            self.hashmap[field_type_index] = 1
            self.hashmap[item_hash_index] = item_hash
            self.hashmap[item_bucket_index] = item
            self._increase_size()
            return
        elif 1 == field_type:
            if (item_hash == self.hashmap[item_hash_index]) and (item == self.hashmap[item_bucket_index]):
                # Already present.
                return
            
            # Collision with an inline item: promote the slot to a bucket.
            self._increase_refresh_counter()
            bucket, bucket_offset, _ = self._shared_memory.put_obj(list())
            bucket = cast(IList, bucket)
            bucket.set_capacity(3)
            bucket.extend_with(3, 0)
            self.buckets[item_info_index] = bucket
            bucket[0] = 1
            # Move the previously-inline item into the bucket's first record.
            self.hashmap.move_item_to_list(item_hash_index, bucket, 1)
            self.hashmap.move_item_to_list(item_bucket_index, bucket, 2)
            self.hashmap[field_type_index] = 2
            self.hashmap[item_bucket_index] = bucket_offset
            # Append the new item as the bucket's second record.
            bucket.append(1)
            bucket.append(item_hash)
            bucket.append(item)
            self._increase_size()
            return
        elif 2 == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    # Cached bucket is stale relative to shared memory.
                    raise KeyError
            except KeyError:
                raise
                # NOTE(review): unreachable — the bare `raise` above re-raises
                # before this cached-bucket refresh can run; confirm intent.
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            bucket_len: int = len(bucket)
            # First pass: bail out if the item is already in the bucket.
            for bucket_item_index in range(0, bucket_len, 3):
                bucket_field_type = bucket[bucket_item_index + 0]
                if 1 == bucket_field_type:
                    if (item_hash == bucket[bucket_item_index + 1]) and (item == bucket[bucket_item_index + 2]):
                        return
            
            # Second pass: reuse the first empty record, else append a new one.
            for bucket_item_index in range(0, bucket_len, 3):
                bucket_field_type = bucket[bucket_item_index + 0]
                if 0 == bucket_field_type:
                    bucket[bucket_item_index + 0] = 1
                    bucket[bucket_item_index + 1] = item_hash
                    bucket[bucket_item_index + 2] = item
                    self._increase_size()
                    return
            else:
                bucket.append(1)
                bucket.append(item_hash)
                bucket.append(item)
                self._increase_size()
                return
        else:
            raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')

Add an element.

def add_as_offset(self, value_hash, value_type, value_offset):
    def add_as_offset(self, value_hash, value_type, value_offset):
        """Add an element given its precomputed hash and raw (type, offset) pair.

        Mirrors :meth:`add`, but the stored payload is a
        ``(value_type, value_offset)`` pair written via the IList
        ``*_as_offset`` accessors instead of a Python object, so the element
        never has to be materialized in this process.
        """
        self._check_hashmap()
        item = (value_type, value_offset)
        item_hash = value_hash
        # Record index: low `hash_bits` bits of the hash, times 3 slots/record.
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * 3
        field_type_index = item_info_index + 0
        item_hash_index = item_info_index + 1
        item_bucket_index = item_info_index + 2
        field_type = self.hashmap[field_type_index]
        if 0 == field_type:
            # Empty slot: store the (type, offset) pair inline.
            self.hashmap[field_type_index] = 1
            self.hashmap[item_hash_index] = item_hash
            self.hashmap.setitem_as_offset(item_bucket_index, item)
            self._increase_size()
            return
        elif 1 == field_type:
            if (item_hash == self.hashmap[item_hash_index]) and (item == self.hashmap.getitem_as_offset(item_bucket_index)):
                # Already present.
                return
            
            # Collision with an inline item: promote the slot to a bucket.
            self._increase_refresh_counter()
            bucket, bucket_offset, _ = self._shared_memory.put_obj(list())
            bucket = cast(IList, bucket)
            bucket.set_capacity(3)
            bucket.extend_with(3, 0)
            self.buckets[item_info_index] = bucket
            bucket[0] = 1
            # Move the previously-inline item into the bucket's first record.
            self.hashmap.move_item_to_list(item_hash_index, bucket, 1)
            self.hashmap.move_item_to_list(item_bucket_index, bucket, 2)
            self.hashmap[field_type_index] = 2
            self.hashmap[item_bucket_index] = bucket_offset
            # Append the new item as the bucket's second record.
            bucket.append(1)
            bucket.append(item_hash)
            bucket.append_as_offset(item)
            self._increase_size()
            return
        elif 2 == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    # Cached bucket is stale relative to shared memory.
                    raise KeyError
            except KeyError:
                raise
                # NOTE(review): unreachable — the bare `raise` above re-raises
                # before this cached-bucket refresh can run; confirm intent.
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            bucket_len: int = len(bucket)
            # First pass: bail out if the item is already in the bucket.
            for bucket_item_index in range(0, bucket_len, 3):
                bucket_field_type = bucket[bucket_item_index + 0]
                if 1 == bucket_field_type:
                    if (item_hash == bucket[bucket_item_index + 1]) and (item == bucket.getitem_as_offset(bucket_item_index + 2)):
                        return
            
            # Second pass: reuse the first empty record, else append a new one.
            for bucket_item_index in range(0, bucket_len, 3):
                bucket_field_type = bucket[bucket_item_index + 0]
                if 0 == bucket_field_type:
                    bucket[bucket_item_index + 0] = 1
                    bucket[bucket_item_index + 1] = item_hash
                    bucket.setitem_as_offset(bucket_item_index + 2, item)
                    self._increase_size()
                    return
            else:
                bucket.append(1)
                bucket.append(item_hash)
                bucket.append_as_offset(item)
                self._increase_size()
                return
        else:
            raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')

Add an element.

def discard(self, value):
    def discard(self, value):
        """Remove an element.  Do not raise an exception if absent."""
        self._check_hashmap()
        obj = value
        item_hash = hash(obj)
        # Record index: low `hash_bits` bits of the hash, times 3 slots/record.
        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * 3
        field_type_index = item_info_index + 0
        item_hash_index = item_info_index + 1
        item_bucket_index = item_info_index + 2
        field_type = self.hashmap[field_type_index]
        if 0 == field_type:
            # Empty slot: nothing to remove.
            return
        elif 1 == field_type:
            if (item_hash == self.hashmap[item_hash_index]) and (obj == self.hashmap[item_bucket_index]):
                # Inline match: clear the record.
                self.hashmap[field_type_index] = 0
                self.hashmap[item_hash_index] = None
                self.hashmap[item_bucket_index] = None
                self._decrease_size()
                return
            else:
                return
        elif 2 == field_type:
            bucket_offset = self.hashmap[item_bucket_index]
            try:
                bucket = self.buckets[item_info_index]
                if bucket._offset != bucket_offset:
                    # Cached bucket is stale relative to shared memory.
                    raise KeyError
            except KeyError:
                raise
                # NOTE(review): unreachable — the bare `raise` above re-raises
                # before this cached-bucket refresh can run; confirm intent.
                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)

            # Scan the bucket for a matching record and clear it in place.
            for bucket_item_index in range(0, len(bucket), 3):
                bucket_field_type = bucket[bucket_item_index + 0]
                if 0 == bucket_field_type:
                    continue
                
                bucket_field_hash = bucket[bucket_item_index + 1]
                bucket_obj = bucket[bucket_item_index + 2]
                if (item_hash == bucket_field_hash) and (obj == bucket_obj):
                    bucket[bucket_item_index + 0] = 0
                    bucket[bucket_item_index + 1] = None
                    bucket[bucket_item_index + 2] = None
                    self._decrease_size()
                    return
            return
        else:
            raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')

Remove an element. Do not raise an exception if absent.

hash_bits: int
    @property
    def hash_bits(self) -> int:
        """Number of least-significant hash bits used to index the hashmap."""
        return self._hash_bits
capacity: int
    @property
    def capacity(self) -> int:
        """Number of 3-slot records allocated in the hashmap."""
        return self._capacity
Inherited Members
collections.abc.MutableSet
remove
pop
clear
collections.abc.Set
isdisjoint
class IMutableSetIterator:
class IMutableSetIterator:
    """Iterator over the items of an :class:`IMutableSet`.

    Walks the set's hashmap of 3-slot records ``[field_type, hash, payload]``:
    field_type 0 = empty, 1 = inline item, 2 = collision bucket (an IList of
    the same record layout). ``_index`` tracks the current hashmap record,
    ``_sub_index`` the current record inside a bucket.
    """

    def __init__(self, iset: 'IMutableSet') -> None:
        self._iset = iset
        self._index = 0
        self._sub_index = 0

    def __next__(self):
        # _check_hashmap() reports whether the underlying hashmap was rebuilt
        # since this iterator started — iteration is then invalid.
        if self._iset._check_hashmap():
            raise RuntimeError("Set's hashmap changed during iteration")

        while self._index < self._iset.capacity:
            item_info_index: int = self._index * 3
            field_type_index = item_info_index + 0
            item_bucket_index = item_info_index + 2
            field_type = self._iset.hashmap[field_type_index]
            if 0 == field_type:
                # Empty slot.
                self._index += 1
                continue
            elif 1 == field_type:
                # Inline item: yield it and advance.
                result = self._iset.hashmap[item_bucket_index]
                self._index += 1
                return result
            elif 2 == field_type:
                bucket_offset = self._iset.hashmap[item_bucket_index]
                try:
                    bucket = self._iset.buckets[item_info_index]
                    if bucket._offset != bucket_offset:
                        raise KeyError
                except KeyError:
                    raise
                    # NOTE(review): unreachable fallback kept from the original
                    # (the bare `raise` above re-raises first); confirm intent:
                    # self._iset.buckets[item_info_index] = bucket = IList(self._iset._shared_memory, bucket_offset)

                bucket_len = len(bucket)
                sub_item_info_index = self._sub_index
                while (sub_item_info_index * 3) < bucket_len:
                    sub_item_field_type_index = sub_item_info_index * 3 + 0
                    if bucket[sub_item_field_type_index] == 0:
                        # Skip cleared bucket records.
                        sub_item_info_index += 1
                        continue

                    result = bucket[sub_item_info_index * 3 + 2]
                    # BUGFIX: resume AFTER the record just yielded. The original
                    # did `self._sub_index += 1`, which ignores empty records
                    # skipped above and re-yields items that follow a gap.
                    self._sub_index = sub_item_info_index + 1
                    return result
                else:
                    # Bucket exhausted: move to the next hashmap record.
                    self._sub_index = 0
                    self._index += 1
                    continue
            else:
                raise ValueError(f'Unknown MutableSetHashmapFieldTypes field type at {item_info_index=}: {field_type}')
        else:
            raise StopIteration

    def __iter__(self):
        return self
IMutableSetIterator( iset: IMutableSet)
3369    def __init__(self, iset: IMutableSet) -> None:
3370        self._iset = iset
3371        self._index = 0
3372        self._sub_index = 0
class IMutableSetIteratorAsOffset:
class IMutableSetIteratorAsOffset:
    """Iterator yielding ``(hash, value_type, value_offset)`` triples for an
    :class:`IMutableSet`, without materializing the stored objects.

    With ``pop=True`` each yielded record is cleared from the hashmap/bucket
    as it is returned. NOTE(review): pop mode does not decrement the set's
    stored size — confirm callers account for that.
    """

    def __init__(self, iset: 'IMutableSet', pop: bool = False) -> None:
        self._iset = iset
        self._pop: bool = pop
        self._index = 0
        self._sub_index = 0

    def __next__(self):
        if self._iset._check_hashmap():
            raise RuntimeError("Set's hashmap changed during iteration")

        # The original wrapped this loop in a redundant `if self._index < capacity`
        # whose else-branch raised StopIteration — the loop's exhaustion path
        # does the same, so the guard was dropped.
        while self._index < self._iset.capacity:
            item_info_index: int = self._index * 3
            field_type_index = item_info_index + 0
            item_hash_index = item_info_index + 1
            item_bucket_index = item_info_index + 2
            field_type = self._iset.hashmap[field_type_index]
            if 0 == field_type:
                self._index += 1
                continue
            elif 1 == field_type:
                item_hash = self._iset.hashmap[item_hash_index]
                value_type, value_offset = self._iset.hashmap.getitem_as_offset(item_bucket_index)
                if self._pop:
                    # Clear the record. Third arg False — presumably skips
                    # destroying the referenced object; confirm against
                    # IList.setitem_as_offset semantics.
                    self._iset.hashmap[field_type_index] = 0
                    self._iset.hashmap[item_hash_index] = None
                    self._iset.hashmap.setitem_as_offset(item_bucket_index, (0, 0), False)

                self._index += 1
                return (item_hash, value_type, value_offset)
            elif 2 == field_type:
                bucket_offset = self._iset.hashmap[item_bucket_index]
                try:
                    bucket = self._iset.buckets[item_info_index]
                    if bucket._offset != bucket_offset:
                        raise KeyError
                except KeyError:
                    raise
                    # NOTE(review): unreachable fallback kept from the original
                    # (the bare `raise` above re-raises first); confirm intent:
                    # self._iset.buckets[item_info_index] = bucket = IList(self._iset._shared_memory, bucket_offset)

                bucket_len = len(bucket)
                sub_item_info_index = self._sub_index
                while (sub_item_info_index * 3) < bucket_len:
                    sub_item_field_type_index = sub_item_info_index * 3 + 0
                    if bucket[sub_item_field_type_index] == 0:
                        # Skip cleared bucket records.
                        sub_item_info_index += 1
                        continue

                    sub_item_hash_index = sub_item_info_index * 3 + 1
                    sub_item_obj_index = sub_item_info_index * 3 + 2
                    sub_item_hash = bucket[sub_item_hash_index]
                    sub_item_value_type, sub_item_value_offset = bucket.getitem_as_offset(sub_item_obj_index)
                    if self._pop:
                        bucket[sub_item_field_type_index] = 0
                        bucket[sub_item_hash_index] = None
                        bucket.setitem_as_offset(sub_item_obj_index, (0, 0), False)

                    # BUGFIX: resume AFTER the record just yielded. The original
                    # did `self._sub_index += 1`, which ignores empty records
                    # skipped above and (in non-pop mode) re-yields items that
                    # follow a gap.
                    self._sub_index = sub_item_info_index + 1
                    return (sub_item_hash, sub_item_value_type, sub_item_value_offset)

                # Bucket exhausted: move to the next hashmap record.
                self._sub_index = 0
                self._index += 1

        raise StopIteration

    def __iter__(self):
        return self
IMutableSetIteratorAsOffset( iset: IMutableSet, pop: bool = False)
3428    def __init__(self, iset: IMutableSet, pop: bool = False) -> None:
3429        self._iset = iset
3430        self._pop: bool = pop
3431        self._index = 0
3432        self._sub_index = 0
class TMutableSet:
class TMutableSet:
    """Type handler mapping Python ``set`` objects to shared-memory IMutableSet objects."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: set) -> Tuple[IMutableSet, Offset, Size]:
        """Create a new shared-memory set populated from ``obj``."""
        shared_set: IMutableSet = IMutableSet(shared_memory, obj=obj)
        return shared_set, shared_set._offset, shared_set._obj_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> IMutableSet:
        """Attach to the existing shared-memory set stored at ``offset``."""
        type_tag = read_uint64(shared_memory.base_address, offset)
        if type_tag != ObjectType.tmutableset:
            raise WrongObjectTypeError

        return IMutableSet(shared_memory, offset)

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Attach to and free the shared-memory set stored at ``offset``."""
        type_tag = read_uint64(shared_memory.base_address, offset)
        if type_tag != ObjectType.tmutableset:
            raise WrongObjectTypeError

        IMutableSet(shared_memory, offset)._free_mem()
def map_to_shared_memory( self, shared_memory: SharedMemory, obj: set) -> Tuple[IMutableSet, int, int]:
3503    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: set) -> Tuple[IMutableSet, Offset, Size]:
3504        obj: IMutableSet = IMutableSet(shared_memory, obj=obj)
3505        return obj, obj._offset, obj._obj_size
def init_from_shared_memory( self, shared_memory: SharedMemory, offset: int) -> IMutableSet:
3507    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> IMutableSet:
3508        if ObjectType.tmutableset != read_uint64(shared_memory.base_address, offset):
3509            raise WrongObjectTypeError
3510        
3511        return IMutableSet(shared_memory, offset)
def destroy( self, shared_memory: SharedMemory, offset: int) -> None:
3513    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
3514        if ObjectType.tmutableset != read_uint64(shared_memory.base_address, offset):
3515            raise WrongObjectTypeError
3516        
3517        obj: IMutableSet = IMutableSet(shared_memory, offset)
3518        obj._free_mem()
class ForceMapping(builtins.dict):
class ForceMapping(dict):
    """Marker subclass of ``dict``.

    Adds no behavior of its own; presumably used as a type tag so the
    shared-memory codecs can force mapping-style treatment of an object —
    TODO confirm against the codec registration code elsewhere in this file.
    """
    ...
Inherited Members
builtins.dict
get
setdefault
pop
popitem
keys
items
values
update
fromkeys
clear
copy
FMapping = <class 'ForceMapping'>
forcemapping = <class 'ForceMapping'>
fmapping = <class 'ForceMapping'>
class MappingOffsets(enum.IntEnum):
class MappingOffsets(IntEnum):
    """Header field order of a shared-memory mapping (one 8-byte slot each).

    Matches the write order in ``IMapping.__init__``: size at data+0,
    capacity at data+8, hashmap offset at data+16.
    """
    size = 0            # element-count slot
    capacity = 1        # hashmap capacity slot
    hashmap_offset = 2  # shared-memory offset of the hashmap IList

An enumeration.

size = <MappingOffsets.size: 0>
capacity = <MappingOffsets.capacity: 1>
hashmap_offset = <MappingOffsets.hashmap_offset: 2>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class MappingHashmapFieldTypes(enum.IntEnum):
class MappingHashmapFieldTypes(IntEnum):
    """Slot states of a mapping hashmap record.

    Presumably mirrors the 0/1/2 scheme used by the set code
    (empty / inline object / collision bucket) — confirm against the
    IMapping accessor code.
    """
    tnone = 0    # empty slot
    tobj = 1     # record holds the entry inline
    tbucket = 2  # record holds the offset of a collision bucket

An enumeration.

Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class MappingHashmapItemOffsets(enum.IntEnum):
class MappingHashmapItemOffsets(IntEnum):
    """Relative slot offsets inside a single 4-slot mapping hashmap record.

    NOTE(review): member meanings inferred from their names; the IMapping
    accessor code is outside this view — confirm there.
    """
    field_type = 0     # a MappingHashmapFieldTypes value
    field_hash = 1     # hash of the key
    key_or_bucket = 2  # key object, or bucket offset when field_type == tbucket
    value_or_none = 3  # value object, or None when a bucket is used

An enumeration.

Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class MappingBucketOffsets(enum.IntEnum):
class MappingBucketOffsets(IntEnum):
    """Relative slot offsets inside one record of a mapping collision bucket.

    NOTE(review): member meanings inferred from their names; confirm against
    the IMapping bucket-handling code (outside this view).
    """
    field_hash = 0  # hash of the key
    key_obj = 1     # key object
    value_obj = 2   # value object

An enumeration.

Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class IMapping(BaseIObject, collections.abc.Mapping):
3559class IMapping(BaseIObject, AbsMapping):
3560    def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: AbsMapping = None) -> None:
3561        self._shared_memory = shared_memory
3562        self._base_address = shared_memory.base_address
3563        self._obj_size = None
3564        self._offset: Offset = None
3565        self._offset__data: Offset = None
3566        self._offset__size_offset: Offset = None
3567        self._offset__capacity_offset: Offset = None
3568        self._offset__hashmap_offset: Offset = None
3569        self._load_factor = 0.75
3570        self._hash_bits: int = None
3571        self._capacity: int = None
3572        self._size: int = None
3573        self.hashmap: IList = None
3574        self.hashmap_offset: Offset = None
3575        self.buckets: Dict[int, IList] = dict()
3576
3577        if offset is None:
3578            if obj is None:
3579                # obj = frozenset(set())
3580                data_len = 16
3581            else:
3582                data_len = len(obj)
3583
3584            self._size: int = data_len
3585            self.hash_bits = 1
3586            self.capacity = int(ceil(data_len / self._load_factor))
3587
3588            offset, self._obj_size = shared_memory.malloc(ObjectType.tmapping, 24)
3589            try:
3590                self._offset = offset
3591                offset__data = offset + 16
3592                self._offset__data = offset__data
3593                self._offset__size_offset: Offset = offset__data + 0
3594                self._offset__capacity_offset: Offset = offset__data + 8
3595                self._offset__hashmap_offset = offset__data + 16
3596
3597                write_uint64(shared_memory.base_address, self._offset__size_offset, self._size)
3598                write_uint64(shared_memory.base_address, self._offset__capacity_offset, self.capacity)
3599
3600                self.hashmap, hashmap_offset, _ = shared_memory.put_obj(list())
3601                self.hashmap = cast(IList, self.hashmap)
3602                self.hashmap_offset = hashmap_offset
3603                write_uint64(shared_memory.base_address, self._offset__hashmap_offset, hashmap_offset)
3604                hashmap_capacity = self.capacity * 4
3605                self.hashmap.set_capacity(hashmap_capacity)
3606                self.hashmap.extend_with(hashmap_capacity, 0)
3607                hash_bits: int = self.hash_bits
3608                if obj is not None:
3609                    for key, value in obj.items():
3610                        key_hash = hash(key)
3611                        item_info_index: int = mask_least_significant_bits(key_hash, hash_bits) * 4
3612                        field_type_index = item_info_index + 0
3613                        item_hash_index = item_info_index + 1
3614                        item_bucket_index = item_info_index + 2
3615                        item_value_index = item_info_index + 3
3616                        field_type = self.hashmap[field_type_index]
3617                        if 0 == field_type:
3618                            self.hashmap[field_type_index] = 1
3619                            self.hashmap[item_hash_index] = key_hash
3620                            self.hashmap[item_bucket_index] = key
3621                            self.hashmap[item_value_index] = value
3622                        elif 1 == field_type:
3623                            bucket, bucket_offset, _ = shared_memory.put_obj(list())
3624                            bucket = cast(IList, bucket)
3625                            bucket.set_capacity(3)
3626                            bucket.extend_with(3, 0)
3627                            self.buckets[item_info_index] = bucket
3628                            self.hashmap.move_item_to_list(item_hash_index, bucket, 0)
3629                            self.hashmap.move_item_to_list(item_bucket_index, bucket, 1)
3630                            self.hashmap.move_item_to_list(item_value_index, bucket, 2)
3631                            self.hashmap[field_type_index] = 2
3632                            self.hashmap[item_bucket_index] = bucket_offset
3633                            bucket.append(key_hash)
3634                            bucket.append(key)
3635                            bucket.append(value)
3636                        elif 2 == field_type:
3637                            bucket = self.buckets[item_info_index]
3638                            bucket.append(key_hash)
3639                            bucket.append(key)
3640                            bucket.append(value)
3641                        else:
3642                            raise ValueError(f'Unknown MappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
3643
3644                # print(f'Constructed {self.hashmap=}')
3645                # print(f'\tConstructed buckets:')
3646                # pdi(self.buckets)
3647                # for bucket_index, bucket in self.buckets.items():
3648                #     pdi(bucket)
3649                #     print(f'\t\t{bucket_index}:', bucket)
3650            except:
3651                self._free_mem()
3652                raise
3653        else:
3654            self._offset = offset
3655            offset__data = offset + 16
3656            self._offset__data = offset__data
3657            self._offset__size_offset: Offset = offset__data + 0
3658            self._offset__capacity_offset: Offset = offset__data + 8
3659            self._offset__hashmap_offset = offset__data + 16
3660
3661            self._size = read_uint64(shared_memory.base_address, self._offset__size_offset)
3662            self.hash_bits = 1
3663            self.capacity = read_uint64(shared_memory.base_address, self._offset__capacity_offset)
3664            hashmap_offset = read_uint64(shared_memory.base_address, self._offset__hashmap_offset)
3665            
3666            self.hashmap_offset = hashmap_offset
3667            self.hashmap = IList(shared_memory, hashmap_offset)
3668            # print(f'Adopted by {type(self)}: {self.hashmap=}')
3669            item_info_index: int = 0
3670            # for item_info_index in range(self.capacity):
3671            #     field_type_index = item_info_index * 4 + 0
3672            #     item_hash_index = item_info_index * 4 + 1
3673            #     item_bucket_index = item_info_index * 4 + 2
3674            #     item_value_index = item_info_index * 4 + 3
3675            #     field_type = self.hashmap[field_type_index]
3676            #     if 0 == field_type:
3677            #         continue
3678            #     elif 1 == field_type:
3679            #         continue
3680            #     elif 2 == field_type:
3681            #         bucket_offset = self.hashmap[item_bucket_index]
3682            #         self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
3683            #     else:
3684            #         raise ValueError(f'Unknown MappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
3685
3686            for item_info_index in range(0, self.capacity * 4, 4):
3687                field_type_index = item_info_index + 0
3688                item_hash_index = item_info_index + 1
3689                item_bucket_index = item_info_index + 2
3690                item_value_index = item_info_index + 3
3691                field_type = self.hashmap[field_type_index]
3692                if 0 == field_type:
3693                    continue
3694                elif 1 == field_type:
3695                    continue
3696                elif 2 == field_type:
3697                    bucket_offset = self.hashmap[item_bucket_index]
3698                    self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
3699                else:
3700                    raise ValueError(f'Unknown MappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
3701
3702            # print(f'\tAdopted buckets:')
3703            # pdi(self.buckets)
3704            # for bucket_index, bucket in self.buckets.items():
3705            #     pdi(bucket)
3706            #     print(f'\t\t{bucket_index}:', bucket)
3707
3708    def __len__(self):
3709        return self._size
3710    
3711    def __iter__(self):
3712        return IMappingIterator(self)
3713    
3714    # def __contains__(self, obj: Hashable) -> bool:
3715    #     item_hash = hash(obj)
3716    #     item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits)
3717    #     field_type_index = item_info_index * 4 + 0
3718    #     item_hash_index = item_info_index * 4 + 1
3719    #     item_bucket_index = item_info_index * 4 + 2
3720    #     item_value_index = item_info_index * 4 + 3
3721    #     field_type = self.hashmap[field_type_index]
3722    #     if 0 == field_type:
3723    #         return False
3724    #     elif 1 == field_type:
3725    #         return (item_hash == self.hashmap[item_hash_index]) and (obj == self.hashmap[item_bucket_index])
3726    #     elif 2 == field_type:
3727    #         bucket = self.buckets[item_info_index]
3728    #         # for sub_item_info_index in range(len(bucket)):
3729    #         for sub_item_info_index in range(0, len(bucket) * 3, 3):
3730    #             sub_item_hash_index = sub_item_info_index + 0
3731    #             sub_item_key_obj_index = sub_item_info_index + 1
3732    #             sub_item_value_obj_index = sub_item_info_index + 2
3733    #             if (item_hash == bucket[sub_item_hash_index]) and (obj == bucket[sub_item_key_obj_index]):
3734    #                 return True
3735            
3736    #         return False
3737    #     else:
3738    #         raise ValueError(f'Unknown MappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
3739
3740    def __getitem__(self, key: Hashable):
3741        item_hash = hash(key)
3742        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * 4
3743        field_type_index = item_info_index + 0
3744        item_hash_index = item_info_index + 1
3745        item_bucket_index = item_info_index + 2
3746        item_value_index = item_info_index + 3
3747        field_type = self.hashmap[field_type_index]
3748        if 0 == field_type:
3749            raise KeyError
3750        elif 1 == field_type:
3751            if (item_hash == self.hashmap[item_hash_index]) and (key == self.hashmap[item_bucket_index]):
3752                return self.hashmap[item_value_index]
3753            else:
3754                raise KeyError
3755        elif 2 == field_type:
3756            bucket = self.buckets[item_info_index]
3757            # for sub_item_info_index in range(len(bucket)):
3758            for sub_item_info_index in range(0, len(bucket) * 3, 3):
3759                sub_item_hash_index = sub_item_info_index + 0
3760                sub_item_key_obj_index = sub_item_info_index + 1
3761                sub_item_value_obj_index = sub_item_info_index + 2
3762                if (item_hash == bucket[sub_item_hash_index]) and (key == bucket[sub_item_key_obj_index]):
3763                    return bucket[sub_item_value_obj_index]
3764            
3765            raise KeyError
3766        else:
3767            raise ValueError(f'Unknown MappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
3768
3769    @property
3770    def hash_bits(self) -> int:
3771        return self._hash_bits
3772
3773    @hash_bits.setter
3774    def hash_bits(self, value: int) -> None:
3775        self._hash_bits = value
3776        self._capacity = 2 ** value
3777    
3778    @property
3779    def capacity(self) -> int:
3780        return self._capacity
3781
3782    @capacity.setter
3783    def capacity(self, value: int) -> None:
3784        if value <= self._capacity:
3785            return
3786        
3787        if value <= 2:
3788            self.hash_bits = 1
3789        else:
3790            self.hash_bits = int(ceil(log2(value)))
3791    
3792    def __str__(self) -> str:
3793        return dict(self).__str__()
3794
3795    def __repr__(self) -> str:
3796        return dict(self).__repr__()
3797
3798    def _free_mem(self):
3799        if self._offset is not None:
3800            for _, bucket in self.buckets.items():
3801                self._shared_memory.destroy_obj(bucket._offset)
3802            
3803            self.buckets.clear()
3804            if self.hashmap_offset is not None:
3805                self._shared_memory.destroy_obj(self.hashmap_offset)
3806                self.hashmap_offset = None
3807            
3808            self._shared_memory.free(self._offset)
3809            self._offset = None
IMapping( shared_memory: SharedMemory, offset: int = None, obj: collections.abc.Mapping = None)
3560    def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: AbsMapping = None) -> None:
3561        self._shared_memory = shared_memory
3562        self._base_address = shared_memory.base_address
3563        self._obj_size = None
3564        self._offset: Offset = None
3565        self._offset__data: Offset = None
3566        self._offset__size_offset: Offset = None
3567        self._offset__capacity_offset: Offset = None
3568        self._offset__hashmap_offset: Offset = None
3569        self._load_factor = 0.75
3570        self._hash_bits: int = None
3571        self._capacity: int = None
3572        self._size: int = None
3573        self.hashmap: IList = None
3574        self.hashmap_offset: Offset = None
3575        self.buckets: Dict[int, IList] = dict()
3576
3577        if offset is None:
3578            if obj is None:
3579                # obj = frozenset(set())
3580                data_len = 16
3581            else:
3582                data_len = len(obj)
3583
3584            self._size: int = data_len
3585            self.hash_bits = 1
3586            self.capacity = int(ceil(data_len / self._load_factor))
3587
3588            offset, self._obj_size = shared_memory.malloc(ObjectType.tmapping, 24)
3589            try:
3590                self._offset = offset
3591                offset__data = offset + 16
3592                self._offset__data = offset__data
3593                self._offset__size_offset: Offset = offset__data + 0
3594                self._offset__capacity_offset: Offset = offset__data + 8
3595                self._offset__hashmap_offset = offset__data + 16
3596
3597                write_uint64(shared_memory.base_address, self._offset__size_offset, self._size)
3598                write_uint64(shared_memory.base_address, self._offset__capacity_offset, self.capacity)
3599
3600                self.hashmap, hashmap_offset, _ = shared_memory.put_obj(list())
3601                self.hashmap = cast(IList, self.hashmap)
3602                self.hashmap_offset = hashmap_offset
3603                write_uint64(shared_memory.base_address, self._offset__hashmap_offset, hashmap_offset)
3604                hashmap_capacity = self.capacity * 4
3605                self.hashmap.set_capacity(hashmap_capacity)
3606                self.hashmap.extend_with(hashmap_capacity, 0)
3607                hash_bits: int = self.hash_bits
3608                if obj is not None:
3609                    for key, value in obj.items():
3610                        key_hash = hash(key)
3611                        item_info_index: int = mask_least_significant_bits(key_hash, hash_bits) * 4
3612                        field_type_index = item_info_index + 0
3613                        item_hash_index = item_info_index + 1
3614                        item_bucket_index = item_info_index + 2
3615                        item_value_index = item_info_index + 3
3616                        field_type = self.hashmap[field_type_index]
3617                        if 0 == field_type:
3618                            self.hashmap[field_type_index] = 1
3619                            self.hashmap[item_hash_index] = key_hash
3620                            self.hashmap[item_bucket_index] = key
3621                            self.hashmap[item_value_index] = value
3622                        elif 1 == field_type:
3623                            bucket, bucket_offset, _ = shared_memory.put_obj(list())
3624                            bucket = cast(IList, bucket)
3625                            bucket.set_capacity(3)
3626                            bucket.extend_with(3, 0)
3627                            self.buckets[item_info_index] = bucket
3628                            self.hashmap.move_item_to_list(item_hash_index, bucket, 0)
3629                            self.hashmap.move_item_to_list(item_bucket_index, bucket, 1)
3630                            self.hashmap.move_item_to_list(item_value_index, bucket, 2)
3631                            self.hashmap[field_type_index] = 2
3632                            self.hashmap[item_bucket_index] = bucket_offset
3633                            bucket.append(key_hash)
3634                            bucket.append(key)
3635                            bucket.append(value)
3636                        elif 2 == field_type:
3637                            bucket = self.buckets[item_info_index]
3638                            bucket.append(key_hash)
3639                            bucket.append(key)
3640                            bucket.append(value)
3641                        else:
3642                            raise ValueError(f'Unknown MappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
3643
3644                # print(f'Constructed {self.hashmap=}')
3645                # print(f'\tConstructed buckets:')
3646                # pdi(self.buckets)
3647                # for bucket_index, bucket in self.buckets.items():
3648                #     pdi(bucket)
3649                #     print(f'\t\t{bucket_index}:', bucket)
3650            except:
3651                self._free_mem()
3652                raise
3653        else:
3654            self._offset = offset
3655            offset__data = offset + 16
3656            self._offset__data = offset__data
3657            self._offset__size_offset: Offset = offset__data + 0
3658            self._offset__capacity_offset: Offset = offset__data + 8
3659            self._offset__hashmap_offset = offset__data + 16
3660
3661            self._size = read_uint64(shared_memory.base_address, self._offset__size_offset)
3662            self.hash_bits = 1
3663            self.capacity = read_uint64(shared_memory.base_address, self._offset__capacity_offset)
3664            hashmap_offset = read_uint64(shared_memory.base_address, self._offset__hashmap_offset)
3665            
3666            self.hashmap_offset = hashmap_offset
3667            self.hashmap = IList(shared_memory, hashmap_offset)
3668            # print(f'Adopted by {type(self)}: {self.hashmap=}')
3669            item_info_index: int = 0
3670            # for item_info_index in range(self.capacity):
3671            #     field_type_index = item_info_index * 4 + 0
3672            #     item_hash_index = item_info_index * 4 + 1
3673            #     item_bucket_index = item_info_index * 4 + 2
3674            #     item_value_index = item_info_index * 4 + 3
3675            #     field_type = self.hashmap[field_type_index]
3676            #     if 0 == field_type:
3677            #         continue
3678            #     elif 1 == field_type:
3679            #         continue
3680            #     elif 2 == field_type:
3681            #         bucket_offset = self.hashmap[item_bucket_index]
3682            #         self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
3683            #     else:
3684            #         raise ValueError(f'Unknown MappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
3685
3686            for item_info_index in range(0, self.capacity * 4, 4):
3687                field_type_index = item_info_index + 0
3688                item_hash_index = item_info_index + 1
3689                item_bucket_index = item_info_index + 2
3690                item_value_index = item_info_index + 3
3691                field_type = self.hashmap[field_type_index]
3692                if 0 == field_type:
3693                    continue
3694                elif 1 == field_type:
3695                    continue
3696                elif 2 == field_type:
3697                    bucket_offset = self.hashmap[item_bucket_index]
3698                    self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
3699                else:
3700                    raise ValueError(f'Unknown MappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
3701
3702            # print(f'\tAdopted buckets:')
3703            # pdi(self.buckets)
3704            # for bucket_index, bucket in self.buckets.items():
3705            #     pdi(bucket)
3706            #     print(f'\t\t{bucket_index}:', bucket)
hashmap: IList
hashmap_offset: int
buckets: Dict[int, IList]
hash_bits: int
3769    @property
3770    def hash_bits(self) -> int:
3771        return self._hash_bits
capacity: int
3778    @property
3779    def capacity(self) -> int:
3780        return self._capacity
Inherited Members
collections.abc.Mapping
get
keys
items
values
class IMappingIterator:
3812class IMappingIterator:
3813    def __init__(self, imapping: IMapping) -> None:
3814        self._imapping = imapping
3815        self._index = 0
3816        self._sub_index = 0
3817    
3818    def __next__(self):
3819        while self._index < self._imapping.capacity:
3820            item_info_index: int = self._index * 4
3821            field_type_index = item_info_index + 0
3822            item_hash_index = item_info_index + 1
3823            item_bucket_index = item_info_index + 2
3824            item_value_index = item_info_index + 3
3825            field_type = self._imapping.hashmap[field_type_index]
3826            if 0 == field_type:
3827                self._index += 1
3828                continue
3829            elif 1 == field_type:
3830                result = self._imapping.hashmap[item_bucket_index]
3831                self._index += 1
3832                break
3833            elif 2 == field_type:
3834                bucket = self._imapping.buckets[item_info_index]
3835                sub_item_info_index = self._sub_index
3836                sub_item_hash_index = sub_item_info_index * 3 + 0
3837                sub_item_key_obj_index = sub_item_info_index * 3 + 1
3838                sub_item_value_obj_index = sub_item_info_index * 3 + 2
3839                if (sub_item_info_index * 3) >= len(bucket):
3840                    self._sub_index = 0
3841                    self._index += 1
3842                    continue
3843
3844                result = bucket[sub_item_key_obj_index]
3845                self._sub_index += 1
3846                break
3847            else:
3848                raise ValueError(f'Unknown MappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
3849        else:
3850            raise StopIteration
3851
3852        return result
3853    
3854    def __iter__(self):
3855        return self
IMappingIterator( imapping: IMapping)
3813    def __init__(self, imapping: IMapping) -> None:
3814        self._imapping = imapping
3815        self._index = 0
3816        self._sub_index = 0
class TMapping:
3858class TMapping:
3859    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: AbsMapping) -> Tuple[IMapping, Offset, Size]:
3860        obj: IMapping = IMapping(shared_memory, obj=obj)
3861        return obj, obj._offset, obj._obj_size
3862    
3863    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> IMapping:
3864        if ObjectType.tmapping != read_uint64(shared_memory.base_address, offset):
3865            raise WrongObjectTypeError
3866        
3867        return IMapping(shared_memory, offset)
3868    
3869    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
3870        if ObjectType.tmapping != read_uint64(shared_memory.base_address, offset):
3871            raise WrongObjectTypeError
3872        
3873        obj: IMapping = IMapping(shared_memory, offset)
3874        obj._free_mem()
def map_to_shared_memory( self, shared_memory: SharedMemory, obj: collections.abc.Mapping) -> Tuple[IMapping, int, int]:
3859    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: AbsMapping) -> Tuple[IMapping, Offset, Size]:
3860        obj: IMapping = IMapping(shared_memory, obj=obj)
3861        return obj, obj._offset, obj._obj_size
def init_from_shared_memory( self, shared_memory: SharedMemory, offset: int) -> IMapping:
3863    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> IMapping:
3864        if ObjectType.tmapping != read_uint64(shared_memory.base_address, offset):
3865            raise WrongObjectTypeError
3866        
3867        return IMapping(shared_memory, offset)
def destroy( self, shared_memory: SharedMemory, offset: int) -> None:
3869    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
3870        if ObjectType.tmapping != read_uint64(shared_memory.base_address, offset):
3871            raise WrongObjectTypeError
3872        
3873        obj: IMapping = IMapping(shared_memory, offset)
3874        obj._free_mem()
class MutableMappingOffsets(enum.IntEnum):
3881class MutableMappingOffsets(IntEnum):
3882    size = 0
3883    capacity = 1
3884    hashmap_offset = 2
3885    refresh_counter = 3

An enumeration.

hashmap_offset = <MutableMappingOffsets.hashmap_offset: 2>
refresh_counter = <MutableMappingOffsets.refresh_counter: 3>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class MutableMappingHashmapFieldTypes(enum.IntEnum):
3888class MutableMappingHashmapFieldTypes(IntEnum):
3889    tnone = 0
3890    tobj = 1
3891    tbucket = 2

An enumeration.

Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class MutableMappingHashmapItemOffsets(enum.IntEnum):
3894class MutableMappingHashmapItemOffsets(IntEnum):
3895    field_type = 0
3896    field_hash = 1
3897    key_or_bucket = 2
3898    value_or_none = 3

An enumeration.

Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class MutableMappingBucketFieldTypes(enum.IntEnum):
3901class MutableMappingBucketFieldTypes(IntEnum):
3902    tnone = 0
3903    tobj = 1

An enumeration.

Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class MutableMappingBucketOffsets(enum.IntEnum):
3906class MutableMappingBucketOffsets(IntEnum):
3907    field_type = 0
3908    field_hash = 1
3909    key_obj = 2
3910    value_obj = 3

An enumeration.

Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class IMutableMapping(BaseIObject, collections.abc.MutableMapping):
3913class IMutableMapping(BaseIObject, AbsMutableMapping):
3914    __slots__ = ('_shared_memory', '_base_address', '_obj_size', '_offset', '_offset__data', '_offset__size_offset', '_offset__capacity_offset', '_offset__hashmap_offset', '_load_factor', '_load_factor_2', '_hash_bits', '_capacity', '_min_capacity', '_size', 'hashmap', 'hashmap_offset', 'buckets', '_refresh_counter', '_offset__refresh_counter_offset', 'ignore_rehash')
3915
3916    # @property
3917    # def __mro__(self) -> Tuple:
3918    #     return BaseIObject, AbsMutableMapping, dict
3919
3920    def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: AbsMutableMapping = None) -> None:
3921        self._shared_memory = shared_memory
3922        self._base_address = shared_memory.base_address
3923        self._obj_size = None
3924        self._offset: Offset = None
3925        self._offset__data: Offset = None
3926        self._offset__size_offset: Offset = None
3927        self._offset__capacity_offset: Offset = None
3928        self._offset__hashmap_offset: Offset = None
3929        self._offset__refresh_counter_offset: Offset = None
3930        self._load_factor = 0.75
3931        self._load_factor_2 = 0.5625
3932        self._hash_bits: int = None
3933        self._capacity: int = None
3934        self._min_capacity: int = None
3935        self._size: int = None
3936        self.hashmap: IList = None
3937        self._refresh_counter: int = 0
3938        self.hashmap_offset: Offset = None
3939        self.buckets: Dict[int, IList] = dict()
3940
3941        self.ignore_rehash: bool = True
3942
3943        if offset is None:
3944            if obj is None:
3945                # obj = frozenset(set())
3946                data_len = 16
3947            else:
3948                data_len = len(obj)
3949
3950            self._size: int = 0
3951            self.hash_bits = 1
3952            self.capacity = int(ceil(data_len / self._load_factor))
3953            self._min_capacity = int(ceil(self._capacity * self._load_factor_2))
3954
3955            offset, self._obj_size = shared_memory.malloc(ObjectType.tmutablemapping, 32)
3956            created_items_offsets: List[Offset] = list()
3957            try:
3958                self._offset = offset
3959                offset__data = offset + 16
3960                self._offset__data = offset__data
3961                self._offset__size_offset = offset__data + 0
3962                self._offset__capacity_offset = offset__data + 8
3963                self._offset__hashmap_offset = offset__data + 16
3964                self._offset__refresh_counter_offset = offset__data + 24
3965
3966                write_uint64(shared_memory.base_address, self._offset__size_offset, self._size)
3967                write_uint64(shared_memory.base_address, self._offset__capacity_offset, self.capacity)
3968                write_uint64(shared_memory.base_address, self._offset__refresh_counter_offset, self._refresh_counter)
3969
3970                self.hashmap, hashmap_offset, _ = shared_memory.put_obj(list())
3971                self.hashmap = cast(IList, self.hashmap)
3972                self.hashmap_offset = hashmap_offset
3973                write_uint64(shared_memory.base_address, self._offset__hashmap_offset, hashmap_offset)
3974                hashmap_capacity = self.capacity * 4
3975                self.hashmap.set_capacity(hashmap_capacity)
3976                self.hashmap.extend_with(hashmap_capacity, 0)
3977                hash_bits: int = self.hash_bits
3978                if obj is None:
3979                    pass
3980                elif isinstance(obj, IMutableMapping):
3981                    self._move_from(obj)
3982                else:
3983                    for key, value in obj.items():
3984                        self.__setitem__(key, value)
3985                
3986                self._refresh_counter = read_uint64(shared_memory.base_address, self._offset__refresh_counter_offset)
3987
3988                self.ignore_rehash = False
3989
3990                # print(f'Constructed {self.hashmap=}')
3991                # print(f'\tConstructed buckets:')
3992                # pdi(self.buckets)
3993                # for bucket_index, bucket in self.buckets.items():
3994                #     pdi(bucket)
3995                #     print(f'\t\t{bucket_index}:', bucket)
3996            except:
3997                self._free_mem()
3998                raise
3999        else:
4000            self._refresh_hashmap(offset)
4001            self.ignore_rehash = False
4002
4003            # self._offset = offset
4004            # offset__data = offset + 16
4005            # self._offset__data = offset__data
4006            # self._offset__size_offset: Offset = offset__data + 0
4007            # self._offset__capacity_offset: Offset = offset__data + 8
4008            # self._offset__hashmap_offset = offset__data + 16
4009
4010            # self._size = read_uint64(shared_memory.base_address, self._offset__size_offset)
4011            # self.hash_bits = 1
4012            # self.capacity = read_uint64(shared_memory.base_address, self._offset__capacity_offset)
4013            # hashmap_offset = read_uint64(shared_memory.base_address, self._offset__hashmap_offset)
4014            # self._min_capacity = int(ceil(self._capacity * self._load_factor_2))
4015            
4016            # self.hashmap_offset = hashmap_offset
4017            # self.hashmap = IList(shared_memory, hashmap_offset)
4018            # # print(f'Adopted by {type(self)}: {self.hashmap=}')
4019            # item_info_index: int = 0
4020            # # for item_info_index in range(self.capacity):
4021            # #     field_type_index = item_info_index * 4 + 0
4022            # #     item_hash_index = item_info_index * 4 + 1
4023            # #     item_bucket_index = item_info_index * 4 + 2
4024            # #     item_value_index = item_info_index * 4 + 3
4025            # #     field_type = self.hashmap[field_type_index]
4026            # #     if 0 == field_type:
4027            # #         continue
4028            # #     elif 1 == field_type:
4029            # #         continue
4030            # #     elif 2 == field_type:
4031            # #         bucket_offset = self.hashmap[item_bucket_index]
4032            # #         self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
4033            # #     else:
4034            # #         raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
4035            
4036            # for item_info_index in range(0, self.capacity * 4, 4):
4037            #     field_type_index = item_info_index + 0
4038            #     item_hash_index = item_info_index + 1
4039            #     item_bucket_index = item_info_index + 2
4040            #     item_value_index = item_info_index + 3
4041            #     field_type = self.hashmap[field_type_index]
4042            #     if 0 == field_type:
4043            #         continue
4044            #     elif 1 == field_type:
4045            #         continue
4046            #     elif 2 == field_type:
4047            #         bucket_offset = self.hashmap[item_bucket_index]
4048            #         self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
4049            #     else:
4050            #         raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
4051
4052            # self.ignore_rehash = False
4053            
4054            # # print(f'\tAdopted by {type(self)} buckets:')
4055            # # pdi(self.buckets)
4056            # # for bucket_index, bucket in self.buckets.items():
4057            # #     pdi(bucket)
4058            # #     print(f'\t\t{bucket_index}:', bucket)
4059
4060    def _refresh_hashmap(self, offset: Offset):
4061        # print(f'~ refresh_hashmap {offset}: {intro_func_repr_limited()}')
4062
4063        # ignore_rehash = self.ignore_rehash
4064        # self.ignore_rehash = True
4065
4066        self._hash_bits = None
4067        self._capacity = None
4068        self._min_capacity = None
4069        self._size = None
4070        self.hashmap = None
4071        self._refresh_counter = 0
4072        self.hashmap_offset = None
4073        self.buckets = dict()
4074
4075        shared_memory = self._shared_memory
4076        self._offset = offset
4077        offset__data = offset + 16
4078        self._offset__data = offset__data
4079        self._offset__size_offset: Offset = offset__data + 0
4080        self._offset__capacity_offset: Offset = offset__data + 8
4081        self._offset__hashmap_offset = offset__data + 16
4082        self._offset__refresh_counter_offset = offset__data + 24
4083
4084        self._refresh_counter = read_uint64(shared_memory.base_address, self._offset__refresh_counter_offset)
4085        self._size = read_uint64(shared_memory.base_address, self._offset__size_offset)
4086        self.hash_bits = 1
4087        self.capacity = read_uint64(shared_memory.base_address, self._offset__capacity_offset)
4088        hashmap_offset = read_uint64(shared_memory.base_address, self._offset__hashmap_offset)
4089        self._min_capacity = int(ceil(self._capacity * self._load_factor_2))
4090        
4091        self.hashmap_offset = hashmap_offset
4092        self.hashmap = IList(shared_memory, hashmap_offset)
4093        # print(f'Adopted by {type(self)}: {self.hashmap=}')
4094        # item_info_index: int = 0
4095        # for item_info_index in range(self.capacity):
4096        #     field_type_index = item_info_index * 4 + 0
4097        #     item_hash_index = item_info_index * 4 + 1
4098        #     item_bucket_index = item_info_index * 4 + 2
4099        #     item_value_index = item_info_index * 4 + 3
4100        #     field_type = self.hashmap[field_type_index]
4101        #     if 0 == field_type:
4102        #         continue
4103        #     elif 1 == field_type:
4104        #         continue
4105        #     elif 2 == field_type:
4106        #         bucket_offset = self.hashmap[item_bucket_index]
4107        #         self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
4108        #     else:
4109        #         raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
4110        
4111        for item_info_index in range(0, self.capacity * 4, 4):
4112            field_type_index = item_info_index + 0
4113            item_hash_index = item_info_index + 1
4114            item_bucket_index = item_info_index + 2
4115            item_value_index = item_info_index + 3
4116            field_type = self.hashmap[field_type_index]
4117            if 0 == field_type:
4118                continue
4119            elif 1 == field_type:
4120                continue
4121            elif 2 == field_type:
4122                bucket_offset = self.hashmap[item_bucket_index]
4123                self.buckets[item_info_index] = IList(shared_memory, bucket_offset)
4124            else:
4125                raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
4126
4127        # self.ignore_rehash = ignore_rehash
4128    
4129    @property
4130    def refresh_counter(self):
4131        return read_uint64(self._base_address, self._offset__refresh_counter_offset)
4132    
4133    def _increase_refresh_counter(self):
4134        if self.ignore_rehash:
4135            # print(f'~ ignore increase_refresh_counter {self._offset}: {intro_func_repr_limited()}')
4136            pass
4137        else:
4138            # print(f'~ increase_refresh_counter {self._offset}: {intro_func_repr_limited()}')
4139            # refresh_counter = read_uint64(self._base_address, self._offset__refresh_counter_offset)
4140            # if self._refresh_counter != refresh_counter:
4141            #     print('~!!! increase_refresh_counter')
4142            
4143            self._refresh_counter += 1
4144            write_uint64(self._base_address, self._offset__refresh_counter_offset, self._refresh_counter)
4145    
4146    def _check_hashmap(self):
4147        if self.ignore_rehash:
4148            # print(f'~ ignore check_hashmap {self._offset}: {intro_func_repr_limited()}')
4149            return False
4150        else:
4151            base_address = self._base_address
4152            refresh_counter = read_uint64(base_address, self._offset__refresh_counter_offset)
4153            # hashmap_offset = read_uint64(base_address, self._offset__hashmap_offset)
4154            # if (self._refresh_counter != refresh_counter) or (self.hashmap_offset != hashmap_offset) or (self._hashmap._offset != hashmap_offset):
4155            if self._refresh_counter != refresh_counter:
4156                # print(f'~ check_hashmap {self._offset}: {intro_func_repr_limited()}')
4157                self._refresh_hashmap(self._offset)
4158                return True
4159            
4160            return False
4161
4162    # @property
4163    # def hashmap(self) -> IList:
4164    #     if self.ignore_rehash:
4165    #         return self._hashmap
4166    #     else:
4167    #         self._check_hashmap()
4168    #         return self._hashmap
4169    
4170    # @hashmap.setter
4171    # def hashmap(self, value: IList):
4172    #     self._hashmap = value
4173
4174    def _increase_size(self):
4175        self._size += 1
4176        write_uint64(self._base_address, self._offset__size_offset, self._size)
4177        if (self._size > self._capacity) or (self._size < self._min_capacity):
4178            self._rehash()
4179    
4180    def _decrease_size(self):
4181        self._size -= 1
4182        if self._size < 0:
4183            raise RuntimeError('Size of the set is negative')
4184
4185        write_uint64(self._base_address, self._offset__size_offset, self._size)
4186        if (self._size > self._capacity) or (self._size < self._min_capacity):
4187            self._rehash()
4188    
4189    def _move_from(self, other: 'IMutableMapping'):
4190        for key_hash, key_type, key_offset, value_type, value_offset in other.iter_offset_pop():
4191            self.setitem_as_offset(key_hash, key_type, key_offset, value_type, value_offset)
4192    
4193    def _rehash(self):
4194        if self.ignore_rehash:
4195            # print(f'~ ignore rehash {self._offset}: {intro_func_repr_limited()}')
4196            return 
4197        
4198        # print(f'~ rehash {self._offset}: {intro_func_repr_limited()}')
4199        self._increase_refresh_counter()
4200
4201        ignore_rehash = self.ignore_rehash
4202        self.ignore_rehash = True
4203
4204        new_other, new_other_offset, new_other_size = self._shared_memory.put_obj(self)
4205        new_other = cast(IMutableMapping, new_other)
4206
4207        other_capacity = new_other._capacity
4208        other_hash_bits = new_other._hash_bits
4209        other_min_capacity = new_other._min_capacity
4210        other_size = new_other._size
4211        # refresh_counter = new_other._refresh_counter
4212        other_hashmap = new_other.hashmap
4213        other_hashmap_offset = new_other.hashmap_offset
4214        other_buckets = new_other.buckets
4215        other_hashmap_offset_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__hashmap_offset)
4216        other_size_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__size_offset)
4217        other_capacity_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__capacity_offset)
4218        # refresh_counter_bin = read_uint64(new_other._shared_memory.base_address, new_other._offset__refresh_counter_offset)
4219        
4220        new_other._capacity = self._capacity
4221        new_other._hash_bits = self._hash_bits
4222        new_other._min_capacity = self._min_capacity
4223        new_other._size = self._size
4224        # new_other._refresh_counter = self._refresh_counter
4225        new_other.hashmap = self.hashmap
4226        new_other.hashmap_offset = self.hashmap_offset
4227        new_other.buckets = self.buckets
4228        write_uint64(new_other._shared_memory.base_address, new_other._offset__hashmap_offset, read_uint64(self._base_address, self._offset__hashmap_offset))
4229        write_uint64(new_other._shared_memory.base_address, new_other._offset__size_offset, read_uint64(self._base_address, self._offset__size_offset))
4230        write_uint64(new_other._shared_memory.base_address, new_other._offset__capacity_offset, read_uint64(self._base_address, self._offset__capacity_offset))
4231        # write_uint64(new_other._shared_memory.base_address, new_other._offset__refresh_counter_offset, read_uint64(self._base_address, self._offset__refresh_counter_offset))
4232
4233        self._capacity = other_capacity
4234        self._hash_bits = other_hash_bits
4235        self._min_capacity = other_min_capacity
4236        self._size = other_size
4237        # self._refresh_counter = refresh_counter
4238        self.hashmap = other_hashmap
4239        self.hashmap_offset = other_hashmap_offset
4240        self.buckets = other_buckets
4241        write_uint64(self._base_address, self._offset__hashmap_offset, other_hashmap_offset_bin)
4242        write_uint64(self._base_address, self._offset__size_offset, other_size_bin)
4243        write_uint64(self._base_address, self._offset__capacity_offset, other_capacity_bin)
4244        # write_uint64(self._base_address, self._offset__refresh_counter_offset, refresh_counter_bin)
4245
4246        self._shared_memory.destroy_obj(new_other_offset)
4247
4248        self.ignore_rehash = ignore_rehash
4249
4250    def __len__(self):
4251        self._check_hashmap()
4252        return self._size
4253    
4254    def __iter__(self):
4255        self._check_hashmap()
4256        return IMutableMappingIterator(self)
4257    
4258    def iter_offset(self):
4259        self._check_hashmap()
4260        return IMutableMappingIteratorAsOffset(self)
4261    
4262    def iter_offset_pop(self):
4263        self._check_hashmap()
4264        return IMutableMappingIteratorAsOffset(self, True)
4265    
4266    # def __contains__(self, key: Hashable) -> bool:
4267    #     item_hash = hash(key)
4268    #     item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits)
4269    #     field_type_index = item_info_index * 4 + 0
4270    #     item_hash_index = item_info_index * 4 + 1
4271    #     item_bucket_index = item_info_index * 4 + 2
4272    #     item_value_index = item_info_index * 4 + 3
4273    #     field_type = self.hashmap[field_type_index]
4274    #     if 0 == field_type:
4275    #         return False
4276    #     elif 1 == field_type:
4277    #         return (item_hash == self.hashmap[item_hash_index]) and (key == self.hashmap[item_bucket_index])
4278    #     elif 2 == field_type:
4279    #         bucket = self.buckets[item_info_index]
4280    #         for sub_item_info_index in range(0, len(bucket), 4):
4281    #             bucket_field_type = bucket[sub_item_info_index + 0]
4282    #             if 0 == bucket_field_type:
4283    #                 continue
4284
4285    #             sub_item_hash_index = sub_item_info_index + 1
4286    #             sub_item_key_obj_index = sub_item_info_index + 2
4287    #             sub_item_value_obj_index = sub_item_info_index + 3
4288    #             if (item_hash == bucket[sub_item_hash_index]) and (key == bucket[sub_item_key_obj_index]):
4289    #                 return True
4290            
4291    #         return False
4292    #     else:
4293    #         raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
4294
4295    def __getitem__(self, key: Hashable):
4296        self._check_hashmap()
4297        item_hash = hash(key)
4298        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * 4
4299        field_type_index = item_info_index + 0
4300        item_hash_index = item_info_index + 1
4301        item_bucket_index = item_info_index + 2
4302        item_value_index = item_info_index + 3
4303        field_type = self.hashmap[field_type_index]
4304        if 0 == field_type:
4305            raise KeyError
4306        elif 1 == field_type:
4307            if (item_hash == self.hashmap[item_hash_index]) and (key == self.hashmap[item_bucket_index]):
4308                return self.hashmap[item_value_index]
4309            else:
4310                raise KeyError
4311        elif 2 == field_type:
4312            bucket_offset = self.hashmap[item_bucket_index]
4313            try:
4314                bucket = self.buckets[item_info_index]
4315                if bucket._offset != bucket_offset:
4316                    raise KeyError
4317            except KeyError:
4318                raise
4319                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)
4320
4321            for sub_item_info_index in range(0, len(bucket), 4):
4322                bucket_field_type = bucket[sub_item_info_index + 0]
4323                if 0 == bucket_field_type:
4324                    continue
4325
4326                sub_item_hash_index = sub_item_info_index + 1
4327                sub_item_key_obj_index = sub_item_info_index + 2
4328                sub_item_value_obj_index = sub_item_info_index + 3
4329                if (item_hash == bucket[sub_item_hash_index]) and (key == bucket[sub_item_key_obj_index]):
4330                    return bucket[sub_item_value_obj_index]
4331            
4332            raise KeyError
4333        else:
4334            raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
4335
4336    def __setitem__(self, key, value):
4337        self._check_hashmap()
4338        key_hash = hash(key)
4339        item_info_index: int = mask_least_significant_bits(key_hash, self.hash_bits) * 4
4340        field_type_index = item_info_index + 0
4341        item_hash_index = item_info_index + 1
4342        item_bucket_index = item_info_index + 2
4343        item_value_index = item_info_index + 3
4344        field_type = self.hashmap[field_type_index]
4345        if 0 == field_type:
4346            self.hashmap[field_type_index] = 1
4347            self.hashmap[item_hash_index] = key_hash
4348            self.hashmap[item_bucket_index] = key
4349            self.hashmap[item_value_index] = value
4350            self._increase_size()
4351            return
4352        elif 1 == field_type:
4353            if (key_hash == self.hashmap[item_hash_index]) and (key == self.hashmap[item_bucket_index]):
4354                self.hashmap[item_value_index] = value
4355                return
4356            
4357            self._increase_refresh_counter()
4358            bucket, bucket_offset, _ = self._shared_memory.put_obj(list())
4359            bucket = cast(IList, bucket)
4360            bucket.set_capacity(4)
4361            bucket.extend_with(4, 0)
4362            self.buckets[item_info_index] = bucket
4363            bucket[0] = 1
4364            self.hashmap.move_item_to_list(item_hash_index, bucket, 1)
4365            self.hashmap.move_item_to_list(item_bucket_index, bucket, 2)
4366            self.hashmap.move_item_to_list(item_value_index, bucket, 3)
4367            self.hashmap[field_type_index] = 2
4368            self.hashmap[item_bucket_index] = bucket_offset
4369            bucket.append(1)
4370            bucket.append(key_hash)
4371            bucket.append(key)
4372            bucket.append(value)
4373            self._increase_size()
4374            return
4375        elif 2 == field_type:
4376            bucket_offset = self.hashmap[item_bucket_index]
4377            try:
4378                bucket = self.buckets[item_info_index]
4379                if bucket._offset != bucket_offset:
4380                    raise KeyError
4381            except KeyError:
4382                raise
4383                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)
4384
4385            bucket_len: int = len(bucket)
4386            for bucket_item_index in range(0, bucket_len, 4):
4387                bucket_field_type = bucket[bucket_item_index + 0]
4388                if 1 == bucket_field_type:
4389                    if (key_hash == bucket[bucket_item_index + 1]) and (key == bucket[bucket_item_index + 2]):
4390                        bucket[bucket_item_index + 3] = value
4391                        return
4392            
4393            for bucket_item_index in range(0, bucket_len, 4):
4394                bucket_field_type = bucket[bucket_item_index + 0]
4395                if 0 == bucket_field_type:
4396                    bucket[bucket_item_index + 0] = 1
4397                    bucket[bucket_item_index + 1] = key_hash
4398                    bucket[bucket_item_index + 2] = key
4399                    bucket[bucket_item_index + 3] = value
4400                    self._increase_size()
4401                    return
4402            else:
4403                bucket.append(1)
4404                bucket.append(key_hash)
4405                bucket.append(key)
4406                bucket.append(value)
4407                self._increase_size()
4408                return
4409        else:
4410            raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
4411
4412    def setitem_as_offset(self, key_hash, key_type, key_offset, value_type, value_offset):
4413        self._check_hashmap()
4414        key = (key_type, key_offset)
4415        value = (value_type, value_offset)
4416        item_info_index: int = mask_least_significant_bits(key_hash, self.hash_bits) * 4
4417        field_type_index = item_info_index + 0
4418        item_hash_index = item_info_index + 1
4419        item_bucket_index = item_info_index + 2
4420        item_value_index = item_info_index + 3
4421        field_type = self.hashmap[field_type_index]
4422        if 0 == field_type:
4423            self.hashmap[field_type_index] = 1
4424            self.hashmap[item_hash_index] = key_hash
4425            self.hashmap.setitem_as_offset(item_bucket_index, key)
4426            self.hashmap.setitem_as_offset(item_value_index, value)
4427            self._increase_size()
4428            return
4429        elif 1 == field_type:
4430            if (key_hash == self.hashmap[item_hash_index]) and (key == self.hashmap.getitem_as_offset(item_bucket_index)):
4431                self.hashmap.setitem_as_offset(item_value_index, value)
4432                return
4433            
4434            self._increase_refresh_counter()
4435            bucket, bucket_offset, _ = self._shared_memory.put_obj(list())
4436            bucket = cast(IList, bucket)
4437            bucket.set_capacity(4)
4438            bucket.extend_with(4, 0)
4439            self.buckets[item_info_index] = bucket
4440            bucket[0] = 1
4441            self.hashmap.move_item_to_list(item_hash_index, bucket, 1)
4442            self.hashmap.move_item_to_list(item_bucket_index, bucket, 2)
4443            self.hashmap.move_item_to_list(item_value_index, bucket, 3)
4444            self.hashmap[field_type_index] = 2
4445            self.hashmap[item_bucket_index] = bucket_offset
4446            bucket.append(1)
4447            bucket.append(key_hash)
4448            bucket.append_as_offset(key)
4449            bucket.append_as_offset(value)
4450            self._increase_size()
4451            return
4452        elif 2 == field_type:
4453            bucket_offset = self.hashmap[item_bucket_index]
4454            try:
4455                bucket = self.buckets[item_info_index]
4456                if bucket._offset != bucket_offset:
4457                    raise KeyError
4458            except KeyError:
4459                raise
4460                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)
4461
4462            bucket_len: int = len(bucket)
4463            for bucket_item_index in range(0, bucket_len, 4):
4464                bucket_field_type = bucket[bucket_item_index + 0]
4465                if 1 == bucket_field_type:
4466                    if (key_hash == bucket[bucket_item_index + 1]) and (key == bucket.getitem_as_offset(bucket_item_index + 2)):
4467                        bucket.setitem_as_offset(bucket_item_index + 3, value)
4468                        return
4469            
4470            for bucket_item_index in range(0, bucket_len, 4):
4471                bucket_field_type = bucket[bucket_item_index + 0]
4472                if 0 == bucket_field_type:
4473                    bucket[bucket_item_index + 0] = 1
4474                    bucket[bucket_item_index + 1] = key_hash
4475                    bucket.setitem_as_offset(bucket_item_index + 2, key)
4476                    bucket.setitem_as_offset(bucket_item_index + 3, value)
4477                    self._increase_size()
4478                    return
4479            else:
4480                bucket.append(1)
4481                bucket.append(key_hash)
4482                bucket.append_as_offset(key)
4483                bucket.append_as_offset(value)
4484                self._increase_size()
4485                return
4486        else:
4487            raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
4488
4489    def __delitem__(self, key):
4490        self._check_hashmap()
4491        item_hash = hash(key)
4492        item_info_index: int = mask_least_significant_bits(item_hash, self.hash_bits) * 4
4493        field_type_index = item_info_index + 0
4494        item_hash_index = item_info_index + 1
4495        item_bucket_index = item_info_index + 2
4496        item_value_index = item_info_index + 3
4497        field_type = self.hashmap[field_type_index]
4498        if 0 == field_type:
4499            raise KeyError
4500        elif 1 == field_type:
4501            if (item_hash == self.hashmap[item_hash_index]) and (key == self.hashmap[item_bucket_index]):
4502                self.hashmap[field_type_index] = 0
4503                self.hashmap[item_hash_index] = None
4504                self.hashmap[item_bucket_index] = None
4505                self.hashmap[item_value_index] = None
4506                self._decrease_size()
4507                return
4508            else:
4509                raise KeyError
4510        elif 2 == field_type:
4511            bucket_offset = self.hashmap[item_bucket_index]
4512            try:
4513                bucket = self.buckets[item_info_index]
4514                if bucket._offset != bucket_offset:
4515                    raise KeyError
4516            except KeyError:
4517                raise
4518                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)
4519
4520            for sub_item_info_index in range(0, len(bucket), 4):
4521                bucket_field_type = bucket[sub_item_info_index + 0]
4522                if 0 == bucket_field_type:
4523                    continue
4524
4525                sub_item_hash_index = sub_item_info_index + 1
4526                sub_item_key_obj_index = sub_item_info_index + 2
4527                sub_item_value_obj_index = sub_item_info_index + 3
4528                if (item_hash == bucket[sub_item_hash_index]) and (key == bucket[sub_item_key_obj_index]):
4529                    bucket[sub_item_info_index + 0] = 0
4530                    bucket[sub_item_hash_index] = None
4531                    bucket[sub_item_key_obj_index] = None
4532                    bucket[sub_item_value_obj_index] = None
4533                    self._decrease_size()
4534                    return
4535
4536            raise KeyError
4537        else:
4538            raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
4539
4540    @property
4541    def hash_bits(self) -> int:
4542        return self._hash_bits
4543
4544    @hash_bits.setter
4545    def hash_bits(self, value: int) -> None:
4546        self._hash_bits = value
4547        self._capacity = 2 ** value
4548    
4549    @property
4550    def capacity(self) -> int:
4551        return self._capacity
4552
4553    @capacity.setter
4554    def capacity(self, value: int) -> None:
4555        if value <= self._capacity:
4556            return
4557        
4558        if value <= 2:
4559            self.hash_bits = 1
4560        else:
4561            self.hash_bits = int(ceil(log2(value)))
4562    
4563    def __str__(self) -> str:
4564        self._check_hashmap()
4565        return dict(self).__str__()
4566
4567    def __repr__(self) -> str:
4568        self._check_hashmap()
4569        return dict(self).__repr__()
4570
4571    def _free_mem(self):
4572        if self._offset is not None:
4573            if self.hashmap_offset is not None:
4574                self._check_hashmap()
4575            
4576            for _, bucket in self.buckets.items():
4577                self._shared_memory.destroy_obj(bucket._offset)
4578            self.buckets.clear()
4579            if self.hashmap_offset is not None:
4580                self._shared_memory.destroy_obj(self.hashmap_offset)
4581                self.hashmap_offset = None
4582            
4583            self._shared_memory.free(self._offset)
4584            self._offset = None
IMutableMapping( shared_memory: SharedMemory, offset: int = None, obj: collections.abc.MutableMapping = None)
    def __init__(self, shared_memory: 'SharedMemory', offset: Offset = None, obj: AbsMutableMapping = None) -> None:
        """Create a new shared-memory mutable mapping or attach to an existing one.

        :param shared_memory: owning SharedMemory arena.
        :param offset: when given, adopt the already-allocated object at this
            offset instead of allocating a new one (``obj`` is then ignored).
        :param obj: optional mapping whose items seed the new object.
        """
        self._shared_memory = shared_memory
        self._base_address = shared_memory.base_address
        self._obj_size = None
        self._offset: Offset = None
        self._offset__data: Offset = None
        self._offset__size_offset: Offset = None
        self._offset__capacity_offset: Offset = None
        self._offset__hashmap_offset: Offset = None
        self._offset__refresh_counter_offset: Offset = None
        self._load_factor = 0.75
        # NOTE(review): 0.5625 == 0.75 ** 2 — presumably the shrink threshold; confirm.
        self._load_factor_2 = 0.5625
        self._hash_bits: int = None
        self._capacity: int = None
        self._min_capacity: int = None
        self._size: int = None
        self.hashmap: IList = None
        self._refresh_counter: int = 0
        self.hashmap_offset: Offset = None
        self.buckets: Dict[int, IList] = dict()

        # Rehashing is suppressed until construction/adoption is complete.
        self.ignore_rehash: bool = True

        if offset is None:
            if obj is None:
                # obj = frozenset(set())
                data_len = 16
            else:
                data_len = len(obj)

            self._size: int = 0
            self.hash_bits = 1
            self.capacity = int(ceil(data_len / self._load_factor))
            self._min_capacity = int(ceil(self._capacity * self._load_factor_2))

            offset, self._obj_size = shared_memory.malloc(ObjectType.tmutablemapping, 32)
            created_items_offsets: List[Offset] = list()
            try:
                self._offset = offset
                # Payload starts 16 bytes past the object header; four uint64
                # fields follow: size, capacity, hashmap offset, refresh counter.
                offset__data = offset + 16
                self._offset__data = offset__data
                self._offset__size_offset = offset__data + 0
                self._offset__capacity_offset = offset__data + 8
                self._offset__hashmap_offset = offset__data + 16
                self._offset__refresh_counter_offset = offset__data + 24

                write_uint64(shared_memory.base_address, self._offset__size_offset, self._size)
                write_uint64(shared_memory.base_address, self._offset__capacity_offset, self.capacity)
                write_uint64(shared_memory.base_address, self._offset__refresh_counter_offset, self._refresh_counter)

                # The hashmap is one flat IList of 4-slot records:
                # (field_type, item_hash, key_or_bucket, value).
                self.hashmap, hashmap_offset, _ = shared_memory.put_obj(list())
                self.hashmap = cast(IList, self.hashmap)
                self.hashmap_offset = hashmap_offset
                write_uint64(shared_memory.base_address, self._offset__hashmap_offset, hashmap_offset)
                hashmap_capacity = self.capacity * 4
                self.hashmap.set_capacity(hashmap_capacity)
                self.hashmap.extend_with(hashmap_capacity, 0)
                hash_bits: int = self.hash_bits
                if obj is None:
                    pass
                elif isinstance(obj, IMutableMapping):
                    self._move_from(obj)
                else:
                    for key, value in obj.items():
                        self.__setitem__(key, value)
                
                self._refresh_counter = read_uint64(shared_memory.base_address, self._offset__refresh_counter_offset)

                self.ignore_rehash = False
            except:
                # Roll back the partially constructed object on any failure.
                self._free_mem()
                raise
        else:
            # Adoption path: all layout/state reading lives in _refresh_hashmap().
            self._refresh_hashmap(offset)
            self.ignore_rehash = False
hashmap: IList
hashmap_offset: int
buckets: Dict[int, IList]
ignore_rehash: bool
refresh_counter
4129    @property
4130    def refresh_counter(self):
4131        return read_uint64(self._base_address, self._offset__refresh_counter_offset)
def iter_offset(self):
4258    def iter_offset(self):
4259        self._check_hashmap()
4260        return IMutableMappingIteratorAsOffset(self)
def iter_offset_pop(self):
4262    def iter_offset_pop(self):
4263        self._check_hashmap()
4264        return IMutableMappingIteratorAsOffset(self, True)
def setitem_as_offset(self, key_hash, key_type, key_offset, value_type, value_offset):
4412    def setitem_as_offset(self, key_hash, key_type, key_offset, value_type, value_offset):
4413        self._check_hashmap()
4414        key = (key_type, key_offset)
4415        value = (value_type, value_offset)
4416        item_info_index: int = mask_least_significant_bits(key_hash, self.hash_bits) * 4
4417        field_type_index = item_info_index + 0
4418        item_hash_index = item_info_index + 1
4419        item_bucket_index = item_info_index + 2
4420        item_value_index = item_info_index + 3
4421        field_type = self.hashmap[field_type_index]
4422        if 0 == field_type:
4423            self.hashmap[field_type_index] = 1
4424            self.hashmap[item_hash_index] = key_hash
4425            self.hashmap.setitem_as_offset(item_bucket_index, key)
4426            self.hashmap.setitem_as_offset(item_value_index, value)
4427            self._increase_size()
4428            return
4429        elif 1 == field_type:
4430            if (key_hash == self.hashmap[item_hash_index]) and (key == self.hashmap.getitem_as_offset(item_bucket_index)):
4431                self.hashmap.setitem_as_offset(item_value_index, value)
4432                return
4433            
4434            self._increase_refresh_counter()
4435            bucket, bucket_offset, _ = self._shared_memory.put_obj(list())
4436            bucket = cast(IList, bucket)
4437            bucket.set_capacity(4)
4438            bucket.extend_with(4, 0)
4439            self.buckets[item_info_index] = bucket
4440            bucket[0] = 1
4441            self.hashmap.move_item_to_list(item_hash_index, bucket, 1)
4442            self.hashmap.move_item_to_list(item_bucket_index, bucket, 2)
4443            self.hashmap.move_item_to_list(item_value_index, bucket, 3)
4444            self.hashmap[field_type_index] = 2
4445            self.hashmap[item_bucket_index] = bucket_offset
4446            bucket.append(1)
4447            bucket.append(key_hash)
4448            bucket.append_as_offset(key)
4449            bucket.append_as_offset(value)
4450            self._increase_size()
4451            return
4452        elif 2 == field_type:
4453            bucket_offset = self.hashmap[item_bucket_index]
4454            try:
4455                bucket = self.buckets[item_info_index]
4456                if bucket._offset != bucket_offset:
4457                    raise KeyError
4458            except KeyError:
4459                raise
4460                self.buckets[item_info_index] = bucket = IList(self._shared_memory, bucket_offset)
4461
4462            bucket_len: int = len(bucket)
4463            for bucket_item_index in range(0, bucket_len, 4):
4464                bucket_field_type = bucket[bucket_item_index + 0]
4465                if 1 == bucket_field_type:
4466                    if (key_hash == bucket[bucket_item_index + 1]) and (key == bucket.getitem_as_offset(bucket_item_index + 2)):
4467                        bucket.setitem_as_offset(bucket_item_index + 3, value)
4468                        return
4469            
4470            for bucket_item_index in range(0, bucket_len, 4):
4471                bucket_field_type = bucket[bucket_item_index + 0]
4472                if 0 == bucket_field_type:
4473                    bucket[bucket_item_index + 0] = 1
4474                    bucket[bucket_item_index + 1] = key_hash
4475                    bucket.setitem_as_offset(bucket_item_index + 2, key)
4476                    bucket.setitem_as_offset(bucket_item_index + 3, value)
4477                    self._increase_size()
4478                    return
4479            else:
4480                bucket.append(1)
4481                bucket.append(key_hash)
4482                bucket.append_as_offset(key)
4483                bucket.append_as_offset(value)
4484                self._increase_size()
4485                return
4486        else:
4487            raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
hash_bits: int
4540    @property
4541    def hash_bits(self) -> int:
4542        return self._hash_bits
capacity: int
4549    @property
4550    def capacity(self) -> int:
4551        return self._capacity
Inherited Members
collections.abc.MutableMapping
pop
popitem
clear
update
setdefault
collections.abc.Mapping
get
keys
items
values
class IMutableMappingIterator:
class IMutableMappingIterator:
    """Iterates the keys of an ``IMutableMapping``.

    ``_index`` walks the top-level hashmap records; ``_sub_index`` walks the
    records of the collision bucket at the current slot.
    """

    def __init__(self, imapping: 'IMutableMapping') -> None:
        self._imapping = imapping
        self._index = 0
        self._sub_index = 0

    def __next__(self):
        if self._imapping._check_hashmap():
            raise RuntimeError("Dictionary's hashmap changed during iteration")

        while self._index < self._imapping.capacity:
            # Each record occupies 4 slots: (field_type, hash, key/bucket, value).
            item_info_index: int = self._index * 4
            field_type_index = item_info_index + 0
            item_bucket_index = item_info_index + 2
            field_type = self._imapping.hashmap[field_type_index]
            if 0 == field_type:
                self._index += 1
                continue
            elif 1 == field_type:
                # Inline item: the key object lives in the bucket slot.
                result = self._imapping.hashmap[item_bucket_index]
                self._index += 1
                return result
            elif 2 == field_type:
                bucket_offset = self._imapping.hashmap[item_bucket_index]
                try:
                    bucket = self._imapping.buckets[item_info_index]
                    if bucket._offset != bucket_offset:
                        raise KeyError
                except KeyError:
                    # BUGFIX: a stray debug ``raise`` used to make this
                    # recovery unreachable; re-attach the bucket on cache miss.
                    self._imapping.buckets[item_info_index] = bucket = IList(self._imapping._shared_memory, bucket_offset)

                bucket_len = len(bucket)
                sub_item_info_index = self._sub_index
                while (sub_item_info_index * 4) < bucket_len:
                    sub_item_field_type_index = sub_item_info_index * 4 + 0
                    if bucket[sub_item_field_type_index] == 0:
                        sub_item_info_index += 1
                        continue

                    sub_item_key_obj_index = sub_item_info_index * 4 + 2
                    result = bucket[sub_item_key_obj_index]
                    # BUGFIX: resume AFTER the record just returned.  The old
                    # ``self._sub_index += 1`` did not account for skipped
                    # empty slots and re-returned the same record next call.
                    self._sub_index = sub_item_info_index + 1
                    return result
                else:
                    # Bucket exhausted: move on to the next top-level record.
                    self._sub_index = 0
                    self._index += 1
                    continue
            else:
                raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
        else:
            raise StopIteration

    def __iter__(self):
        return self
IMutableMappingIterator( imapping: IMutableMapping)
4588    def __init__(self, imapping: IMutableMapping) -> None:
4589        self._imapping = imapping
4590        self._index = 0
4591        self._sub_index = 0
class IMutableMappingIteratorAsOffset:
class IMutableMappingIteratorAsOffset:
    """Iterates raw ``(hash, key_type, key_offset, value_type, value_offset)``
    tuples of an ``IMutableMapping``.

    With ``pop=True`` each yielded slot is cleared as it is visited.
    """

    def __init__(self, imapping: 'IMutableMapping', pop: bool = False) -> None:
        self._imapping = imapping
        self._pop: bool = pop
        self._index = 0
        self._sub_index = 0

    def __next__(self):
        if self._imapping._check_hashmap():
            raise RuntimeError("Dictionary's hashmap changed during iteration")

        while self._index < self._imapping.capacity:
            # Each record occupies 4 slots: (field_type, hash, key/bucket, value).
            item_info_index: int = self._index * 4
            field_type_index = item_info_index + 0
            item_hash_index = item_info_index + 1
            item_bucket_index = item_info_index + 2
            item_value_index = item_info_index + 3
            field_type = self._imapping.hashmap[field_type_index]
            if 0 == field_type:
                self._index += 1
                continue
            elif 1 == field_type:
                key_hash = self._imapping.hashmap[item_hash_index]
                key_type, key_offset = self._imapping.hashmap.getitem_as_offset(item_bucket_index)
                value_type, value_offset = self._imapping.hashmap.getitem_as_offset(item_value_index)
                if self._pop:
                    # Clear the slot without destroying the referenced objects.
                    self._imapping.hashmap[field_type_index] = 0
                    self._imapping.hashmap[item_hash_index] = None
                    self._imapping.hashmap.setitem_as_offset(item_bucket_index, (0, 0), False)
                    self._imapping.hashmap.setitem_as_offset(item_value_index, (0, 0), False)

                self._index += 1
                return key_hash, key_type, key_offset, value_type, value_offset
            elif 2 == field_type:
                bucket_offset = self._imapping.hashmap[item_bucket_index]
                try:
                    bucket = self._imapping.buckets[item_info_index]
                    if bucket._offset != bucket_offset:
                        raise KeyError
                except KeyError:
                    # BUGFIX: a stray debug ``raise`` used to make this
                    # recovery unreachable; re-attach the bucket on cache miss.
                    self._imapping.buckets[item_info_index] = bucket = IList(self._imapping._shared_memory, bucket_offset)

                bucket_len = len(bucket)
                sub_item_info_index = self._sub_index
                while (sub_item_info_index * 4) < bucket_len:
                    sub_item_field_type_index = sub_item_info_index * 4 + 0
                    if bucket[sub_item_field_type_index] == 0:
                        sub_item_info_index += 1
                        continue

                    sub_item_hash_index = sub_item_info_index * 4 + 1
                    sub_item_key_obj_index = sub_item_info_index * 4 + 2
                    sub_item_value_obj_index = sub_item_info_index * 4 + 3

                    key_hash = bucket[sub_item_hash_index]
                    key_type, key_offset = bucket.getitem_as_offset(sub_item_key_obj_index)
                    value_type, value_offset = bucket.getitem_as_offset(sub_item_value_obj_index)
                    if self._pop:
                        bucket[sub_item_field_type_index] = 0
                        bucket[sub_item_hash_index] = None
                        bucket.setitem_as_offset(sub_item_key_obj_index, (0, 0), False)
                        bucket.setitem_as_offset(sub_item_value_obj_index, (0, 0), False)

                    # BUGFIX: resume AFTER the record just returned; the old
                    # ``self._sub_index += 1`` ignored skipped empty slots and
                    # re-returned the same record on the next call.
                    self._sub_index = sub_item_info_index + 1
                    return key_hash, key_type, key_offset, value_type, value_offset
                else:
                    # Bucket exhausted: move on to the next top-level record.
                    self._sub_index = 0
                    self._index += 1
                    continue
            else:
                raise ValueError(f'Unknown MutableMappingHashmapFieldTypes field type at {item_info_index=}: {field_type}')
        else:
            raise StopIteration

    def __iter__(self):
        return self
IMutableMappingIteratorAsOffset( imapping: IMutableMapping, pop: bool = False)
4649    def __init__(self, imapping: IMutableMapping, pop: bool = False) -> None:
4650        self._imapping = imapping
4651        self._pop: bool = pop
4652        self._index = 0
4653        self._sub_index = 0
class TMutableMapping:
class TMutableMapping:
    """Codec that maps ``AbsMutableMapping`` objects into shared memory."""

    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: AbsMutableMapping) -> Tuple[IMutableMapping, Offset, Size]:
        """Copy ``obj`` into shared memory; return (wrapper, offset, size)."""
        mapped: IMutableMapping = IMutableMapping(shared_memory, obj=obj)
        return mapped, mapped._offset, mapped._obj_size

    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> IMutableMapping:
        """Attach to an existing object at ``offset`` after a type check."""
        if ObjectType.tmutablemapping != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        return IMutableMapping(shared_memory, offset)

    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
        """Type-check and release the object stored at ``offset``."""
        if ObjectType.tmutablemapping != read_uint64(shared_memory.base_address, offset):
            raise WrongObjectTypeError

        IMutableMapping(shared_memory, offset)._free_mem()
def map_to_shared_memory( self, shared_memory: SharedMemory, obj: collections.abc.MutableMapping) -> Tuple[IMutableMapping, int, int]:
4728    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: AbsMutableMapping) -> Tuple[IMutableMapping, Offset, Size]:
4729        obj: IMutableMapping = IMutableMapping(shared_memory, obj=obj)
4730        return obj, obj._offset, obj._obj_size
def init_from_shared_memory( self, shared_memory: SharedMemory, offset: int) -> IMutableMapping:
4732    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> IMutableMapping:
4733        if ObjectType.tmutablemapping != read_uint64(shared_memory.base_address, offset):
4734            raise WrongObjectTypeError
4735        
4736        return IMutableMapping(shared_memory, offset)
def destroy( self, shared_memory: SharedMemory, offset: int) -> None:
4738    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
4739        if ObjectType.tmutablemapping != read_uint64(shared_memory.base_address, offset):
4740            raise WrongObjectTypeError
4741        
4742        obj: IMutableMapping = IMutableMapping(shared_memory, offset)
4743        obj._free_mem()
class ForceGeneralObjectCopy:
class ForceGeneralObjectCopy:
    """Marker wrapper: asks TGeneralObject to map a fresh (unpickled) copy of
    ``obj`` instead of modifying ``obj`` in place."""

    def __init__(self, obj: Any) -> None:
        self.obj = obj
ForceGeneralObjectCopy(obj: typing.Any)
4751    def __init__(self, obj: Any) -> None:
4752        self.obj = obj
obj
FGeneralObjectCopy = <class 'ForceGeneralObjectCopy'>
forcegeneralobjectcopy = <class 'ForceGeneralObjectCopy'>
fgeneralobjectcopy = <class 'ForceGeneralObjectCopy'>
class ForceGeneralObjectInplace:
class ForceGeneralObjectInplace:
    """Marker wrapper: asks TGeneralObject to wrap ``obj`` itself (in place)."""

    def __init__(self, obj: Any) -> None:
        self.obj = obj
ForceGeneralObjectInplace(obj: typing.Any)
4761    def __init__(self, obj: Any) -> None:
4762        self.obj = obj
obj
FGeneralObjectInplace = <class 'ForceGeneralObjectInplace'>
forcegeneralobjectinplace = <class 'ForceGeneralObjectInplace'>
fgeneralobjectinplace = <class 'ForceGeneralObjectInplace'>
class GeneralObjectOffsets(enum.IntEnum):
class GeneralObjectOffsets(IntEnum):
    """Field slots of a serialized TGeneralObject record."""

    pickled_obj = 0  # offset of the pickled original object
    obj_dict = 1  # offset of the shared attribute mapping
    setable_data_descriptor_field_names = 2  # presumably the pickled name set; confirm against TGeneralObject

Slot indices of the fields stored for a TGeneralObject record in shared memory.

setable_data_descriptor_field_names = <GeneralObjectOffsets.setable_data_descriptor_field_names: 2>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
def tgeneralobject_custom_getattribute(self, name):
def tgeneralobject_custom_getattribute(self, name):
    """Attribute lookup that prefers the shared-memory attribute mapping.

    Dunder names and the two bookkeeping attributes always resolve through
    ``object.__getattribute__``; anything else is looked up in the shared
    mapping first, falling back to the instance on a miss.
    """
    if name.startswith('__') or name in (
        '_tgeneralobject_imutablemapping_attributes',
        '_tgeneralobject_setable_data_descriptor_field_names',
    ):
        return object.__getattribute__(self, name)

    shared_attributes = self._tgeneralobject_imutablemapping_attributes
    try:
        return shared_attributes[name]
    except KeyError:
        return object.__getattribute__(self, name)
def tgeneralobject_custom_setattr(self, name, value):
def tgeneralobject_custom_setattr(self, name, value):
    """Attribute assignment that routes data values into the shared mapping.

    Dunder/bookkeeping names and callables (functions, methods, method
    descriptors, frame/code objects) stay on the instance itself; everything
    else is written into the shared-memory attribute mapping.
    """
    if name.startswith('__') or name in (
        '_tgeneralobject_imutablemapping_attributes',
        '_tgeneralobject_setable_data_descriptor_field_names',
    ):
        object.__setattr__(self, name, value)
        return

    if (isfunction(value) or ismethod(value) or ismethoddescriptor(value)
            or isinstance(value, (FrameType, CodeType))):
        object.__setattr__(self, name, value)
        return

    self._tgeneralobject_imutablemapping_attributes[name] = value
def tgeneralobject_custom_delattr(self, name):
def tgeneralobject_custom_delattr(self, name):
    """Attribute deletion for TGeneralObject-wrapped instances.

    Dunder and bookkeeping names are deleted normally.  Otherwise: callables
    stay-and-delete on the instance; data descriptors with ``__delete__`` are
    removed via ``object.__delattr__``; plain values are removed from the
    shared attribute mapping, falling back to the instance when absent there.
    """
    if name in {'_tgeneralobject_imutablemapping_attributes', '_tgeneralobject_setable_data_descriptor_field_names'} or name.startswith('__'):
        object.__delattr__(self, name)
    else:
        has_value_static: bool = False
        value_static = None
        try:
            value_static = getattr_static(self, name)
            has_value_static = True
        except AttributeError:
            pass

        deleted: bool = False
        try:
            # FIX: the original mixed `and`/`or` without parentheses, so only
            # `isfunction` was guarded by `has_value_static` and the remaining
            # predicates ran on the None placeholder.  Harmless today (all are
            # False for None) but parenthesized to match the evident intent.
            if has_value_static and (isfunction(value_static) or ismethod(value_static) or isinstance(value_static, (FrameType, CodeType)) or ismethoddescriptor(value_static)):
                object.__delattr__(self, name)
                return
        except AttributeError:
            pass

        try:
            if has_value_static and (not isclass(value_static)) and hasattr(value_static, "__delete__"):
                object.__delattr__(self, name)
                deleted = True
        except AttributeError:
            pass

        try:
            del self._tgeneralobject_imutablemapping_attributes[name]
            return
        except KeyError:
            pass

        if not deleted:
            object.__delattr__(self, name)
def tgeneralobject_wrap_obj( obj, mapped_obj_dict: IMutableMapping, setable_data_descriptor_field_names: typing.Set[str], init_mapped_obj_dict: bool):
def tgeneralobject_wrap_obj(obj, mapped_obj_dict: IMutableMapping, setable_data_descriptor_field_names: Set[str], init_mapped_obj_dict: bool):
    """Rebind ``obj``'s class so attribute access goes through the shared mapping.

    When ``init_mapped_obj_dict`` is True, the current data attributes of
    ``obj`` are first copied into ``mapped_obj_dict``.
    """
    base = obj.__class__
    setattr(obj, '_tgeneralobject_imutablemapping_attributes', mapped_obj_dict)
    setattr(obj, '_tgeneralobject_setable_data_descriptor_field_names', setable_data_descriptor_field_names)
    if init_mapped_obj_dict:
        object_fields = set(dir(object))
        obj_fields = set(dir(obj)) - object_fields
        for key in obj_fields:
            value = getattr_static(obj, key)
            # Bookkeeping fields and dunders stay on the instance itself.
            if key in {'_tgeneralobject_imutablemapping_attributes', '_tgeneralobject_setable_data_descriptor_field_names'} or key.startswith('__'):
                continue

            # Callables and frame/code objects are not data; skip them.
            if isfunction(value) or ismethod(value) or isinstance(value, FrameType) or isinstance(value, CodeType) or ismethoddescriptor(value):
                continue

            # Non-data descriptors (__get__ only, no __set__/__delete__) are skipped too.
            if (not isclass(value)) and (hasattr(value, "__get__") and (not (hasattr(value, "__set__") or hasattr(value, "__delete__")))):
                continue

            if is_setable_data_descriptor(value):
                setable_data_descriptor_field_names.add(key)
            
            mapped_obj_dict[key] = getattr(obj, key)
    
    # Swap in a dynamically created subclass whose attribute hooks route
    # through the shared-memory mapping.
    NewClass = type(
        base.__name__ + 'WrappedByTGeneralObject',
        (base,),
        {
            '__getattribute__': tgeneralobject_custom_getattribute,
            '__setattr__': tgeneralobject_custom_setattr,
            '__delattr__': tgeneralobject_custom_delattr,
        }
    )
    obj.__class__ = NewClass
class TGeneralObject:
4877class TGeneralObject:
4878    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: Any) -> Tuple[Any, Offset, Size]:
4879        offset, real_size = shared_memory.malloc(ObjectType.tgeneralobject, 24)
4880        created_items_offsets: List[Offset] = list()
4881        try:
4882            make_changes_inplace: bool = True
4883            if isinstance(obj, ForceGeneralObjectCopy):
4884                obj = obj.obj
4885                make_changes_inplace = False
4886            elif isinstance(obj, ForceGeneralObjectInplace):
4887                obj = obj.obj
4888                make_changes_inplace = True
4889
4890            dumped_obj: bytes = pickle_dumps(obj)
4891            dumped_mapped_obj_type, dumped_obj_offset, dumped_obj_type_size = shared_memory.put_obj(dumped_obj)
4892            created_items_offsets.append(dumped_obj_offset)
4893            mapped_obj_dict, obj_dict_offset, obj_dict_size = shared_memory.put_obj(dict())
4894            created_items_offsets.append(obj_dict_offset)
4895            
4896            write_uint64(shared_memory.base_address, offset + 16 + 0, dumped_obj_offset)
4897            write_uint64(shared_memory.base_address, offset + 16 + 8, obj_dict_offset)
4898            
4899            setable_data_descriptor_field_names: Set[str] = set()
4900
4901            mapped_obj = None
4902            if make_changes_inplace:
4903                tgeneralobject_wrap_obj(obj, mapped_obj_dict, setable_data_descriptor_field_names, True)
4904                mapped_obj = obj
4905            else:
4906                # mapped_obj = self.init_from_shared_memory(shared_memory, offset)
4907                mapped_obj = pickle_loads(dumped_obj)
4908                tgeneralobject_wrap_obj(mapped_obj, mapped_obj_dict, setable_data_descriptor_field_names, True)
4909
4910            dumped_setable_data_descriptor_field_names: bytes = pickle_dumps(setable_data_descriptor_field_names)
4911            mapped_dumped_setable_data_descriptor_field_names, dumped_setable_data_descriptor_field_names_offset, dumped_setable_data_descriptor_field_names_size = shared_memory.put_obj(dumped_setable_data_descriptor_field_names)
4912            write_uint64(shared_memory.base_address, offset + 16 + 16, dumped_setable_data_descriptor_field_names_offset)
4913        except:
4914            shared_memory.free(offset)
4915            for item_offset in created_items_offsets:
4916                shared_memory.destroy_obj(item_offset)
4917            
4918            raise
4919
4920        return mapped_obj, offset, real_size
4921    
4922    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> Any:
4923        if ObjectType.tgeneralobject != read_uint64(shared_memory.base_address, offset):
4924            raise WrongObjectTypeError
4925
4926        dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
4927        dumped_obj: bytes = shared_memory.get_obj(dumped_obj_offset)
4928        
4929        obj_dict_offset = read_uint64(shared_memory.base_address, offset + 16 + 8)
4930        mapped_obj_dict = shared_memory.get_obj(obj_dict_offset)
4931        obj = pickle_loads(dumped_obj)
4932        
4933        dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + 16 + 16)
4934        dumped_setable_data_descriptor_field_names = shared_memory.get_obj(dumped_setable_data_descriptor_field_names_offset)
4935        setable_data_descriptor_field_names = pickle_loads(dumped_setable_data_descriptor_field_names)
4936        
4937        tgeneralobject_wrap_obj(obj, mapped_obj_dict, setable_data_descriptor_field_names, False)
4938        return obj
4939    
4940    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
4941        if ObjectType.tgeneralobject != read_uint64(shared_memory.base_address, offset):
4942            raise WrongObjectTypeError
4943
4944        dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
4945        shared_memory.destroy_obj(dumped_obj_offset)
4946        obj_dict_offset = read_uint64(shared_memory.base_address, offset + 16 + 8)
4947        if obj_dict_offset:
4948            shared_memory.destroy_obj(obj_dict_offset)
4949        
4950        dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + 16 + 16)
4951        shared_memory.destroy_obj(dumped_setable_data_descriptor_field_names_offset)
4952        shared_memory.free(offset)
4953    
4954    # def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
4955    #     if ObjectType.tgeneralobject != read_uint64(shared_memory.base_address, offset + 0):
4956    #         raise WrongObjectTypeError
4957
4958    #     dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
4959    #     return shared_memory.get_obj_buffer(dumped_obj_offset)
4960    
4961    # def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
4962    #     if ObjectType.tgeneralobject != read_uint64(shared_memory.base_address, offset + 0):
4963    #         raise WrongObjectTypeError
4964
4965
4966    #     dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
4967    #     return shared_memory.get_obj_buffer_2(dumped_obj_offset)
def map_to_shared_memory(self, shared_memory: SharedMemory, obj: typing.Any) -> Tuple[Any, int, int]:
4878    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: Any) -> Tuple[Any, Offset, Size]:
4879        offset, real_size = shared_memory.malloc(ObjectType.tgeneralobject, 24)
4880        created_items_offsets: List[Offset] = list()
4881        try:
4882            make_changes_inplace: bool = True
4883            if isinstance(obj, ForceGeneralObjectCopy):
4884                obj = obj.obj
4885                make_changes_inplace = False
4886            elif isinstance(obj, ForceGeneralObjectInplace):
4887                obj = obj.obj
4888                make_changes_inplace = True
4889
4890            dumped_obj: bytes = pickle_dumps(obj)
4891            dumped_mapped_obj_type, dumped_obj_offset, dumped_obj_type_size = shared_memory.put_obj(dumped_obj)
4892            created_items_offsets.append(dumped_obj_offset)
4893            mapped_obj_dict, obj_dict_offset, obj_dict_size = shared_memory.put_obj(dict())
4894            created_items_offsets.append(obj_dict_offset)
4895            
4896            write_uint64(shared_memory.base_address, offset + 16 + 0, dumped_obj_offset)
4897            write_uint64(shared_memory.base_address, offset + 16 + 8, obj_dict_offset)
4898            
4899            setable_data_descriptor_field_names: Set[str] = set()
4900
4901            mapped_obj = None
4902            if make_changes_inplace:
4903                tgeneralobject_wrap_obj(obj, mapped_obj_dict, setable_data_descriptor_field_names, True)
4904                mapped_obj = obj
4905            else:
4906                # mapped_obj = self.init_from_shared_memory(shared_memory, offset)
4907                mapped_obj = pickle_loads(dumped_obj)
4908                tgeneralobject_wrap_obj(mapped_obj, mapped_obj_dict, setable_data_descriptor_field_names, True)
4909
4910            dumped_setable_data_descriptor_field_names: bytes = pickle_dumps(setable_data_descriptor_field_names)
4911            mapped_dumped_setable_data_descriptor_field_names, dumped_setable_data_descriptor_field_names_offset, dumped_setable_data_descriptor_field_names_size = shared_memory.put_obj(dumped_setable_data_descriptor_field_names)
4912            write_uint64(shared_memory.base_address, offset + 16 + 16, dumped_setable_data_descriptor_field_names_offset)
4913        except:
4914            shared_memory.free(offset)
4915            for item_offset in created_items_offsets:
4916                shared_memory.destroy_obj(item_offset)
4917            
4918            raise
4919
4920        return mapped_obj, offset, real_size
def init_from_shared_memory(self, shared_memory: SharedMemory, offset: int) -> Any:
4922    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> Any:
4923        if ObjectType.tgeneralobject != read_uint64(shared_memory.base_address, offset):
4924            raise WrongObjectTypeError
4925
4926        dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
4927        dumped_obj: bytes = shared_memory.get_obj(dumped_obj_offset)
4928        
4929        obj_dict_offset = read_uint64(shared_memory.base_address, offset + 16 + 8)
4930        mapped_obj_dict = shared_memory.get_obj(obj_dict_offset)
4931        obj = pickle_loads(dumped_obj)
4932        
4933        dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + 16 + 16)
4934        dumped_setable_data_descriptor_field_names = shared_memory.get_obj(dumped_setable_data_descriptor_field_names_offset)
4935        setable_data_descriptor_field_names = pickle_loads(dumped_setable_data_descriptor_field_names)
4936        
4937        tgeneralobject_wrap_obj(obj, mapped_obj_dict, setable_data_descriptor_field_names, False)
4938        return obj
def destroy(self, shared_memory: SharedMemory, offset: int) -> None:
4940    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
4941        if ObjectType.tgeneralobject != read_uint64(shared_memory.base_address, offset):
4942            raise WrongObjectTypeError
4943
4944        dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
4945        shared_memory.destroy_obj(dumped_obj_offset)
4946        obj_dict_offset = read_uint64(shared_memory.base_address, offset + 16 + 8)
4947        if obj_dict_offset:
4948            shared_memory.destroy_obj(obj_dict_offset)
4949        
4950        dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + 16 + 16)
4951        shared_memory.destroy_obj(dumped_setable_data_descriptor_field_names_offset)
4952        shared_memory.free(offset)
class ForceStaticObjectCopy:
4974class ForceStaticObjectCopy:
4975    def __init__(self, obj: Any) -> None:
4976        self.obj = obj
ForceStaticObjectCopy(obj: typing.Any)
4975    def __init__(self, obj: Any) -> None:
4976        self.obj = obj
obj
FStaticObjectCopy = <class 'ForceStaticObjectCopy'>
forcestaticobjectcopy = <class 'ForceStaticObjectCopy'>
fstaticobjectcopy = <class 'ForceStaticObjectCopy'>
class ForceStaticObjectInplace:
4984class ForceStaticObjectInplace:
4985    def __init__(self, obj: Any) -> None:
4986        self.obj = obj
ForceStaticObjectInplace(obj: typing.Any)
4985    def __init__(self, obj: Any) -> None:
4986        self.obj = obj
obj
FStaticObjectInplace = <class 'ForceStaticObjectInplace'>
forcestaticobjectinplace = <class 'ForceStaticObjectInplace'>
fstaticobjectinplace = <class 'ForceStaticObjectInplace'>
class StaticObjectOffsets(enum.IntEnum):
4994class StaticObjectOffsets(IntEnum):
4995    pickled_obj = 0
4996    pickled_attributes_dict = 1
4997    attributes_slots = 2
4998    setable_data_descriptor_field_names = 3

Slot indices of the pointer fields stored in a static object's shared-memory record: the pickled object, the pickled attributes dict, the attributes slots list, and the set of setable data-descriptor field names.

pickled_obj = <StaticObjectOffsets.pickled_obj: 0>
pickled_attributes_dict = <StaticObjectOffsets.pickled_attributes_dict: 1>
attributes_slots = <StaticObjectOffsets.attributes_slots: 2>
setable_data_descriptor_field_names = <StaticObjectOffsets.setable_data_descriptor_field_names: 3>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
def tstaticobject_custom_getattribute(self, name):
5001def tstaticobject_custom_getattribute(self, name):
5002    if name in {'_tstaticobject_attributes_dict', '_tstaticobject_attributes_slots', '_tstaticobject_setable_data_descriptor_field_names'} or name.startswith('__'):
5003        return object.__getattribute__(self, name)
5004    
5005    try:
5006        return self._tstaticobject_attributes_slots[self._tstaticobject_attributes_dict[name]]
5007    except KeyError:
5008        pass
5009    
5010    return object.__getattribute__(self, name)
def tstaticobject_custom_setattr(self, name, value):
5013def tstaticobject_custom_setattr(self, name, value):
5014    if name in {'_tstaticobject_attributes_dict', '_tstaticobject_attributes_slots', '_tstaticobject_setable_data_descriptor_field_names'} or name.startswith('__'):
5015        object.__setattr__(self, name, value)
5016    else:
5017        if isfunction(value) or ismethod(value) or isinstance(value, FrameType) or isinstance(value, CodeType) or ismethoddescriptor(value):
5018            object.__setattr__(self, name, value)
5019            return
5020        
5021        # try:
5022        #     if name in self._tstaticobject_setable_data_descriptor_field_names:
5023        #         object.__setattr__(self, name, value)
5024        # except AttributeError:
5025        #     pass
5026        
5027        try:
5028            self._tstaticobject_attributes_slots[self._tstaticobject_attributes_dict[name]] = value
5029            return
5030        except KeyError:
5031            pass
5032            
5033        object.__setattr__(self, name, value)
def tstaticobject_custom_delattr(self, name):
5036def tstaticobject_custom_delattr(self, name):
5037    if name in {'_tstaticobject_attributes_dict', '_tstaticobject_attributes_slots', '_tstaticobject_setable_data_descriptor_field_names'} or name.startswith('__'):
5038        object.__delattr__(self, name)
5039    else:
5040        if name in self._tstaticobject_attributes_dict:
5041            raise AttributeError(f"'{type(self).__name__}' object attribute '{name}' is read-only")
5042        else:
5043            object.__delattr__(self, name)
def tstaticobject_wrap_obj(obj, attributes_dict: typing.Dict, attributes_slots: IList, setable_data_descriptor_field_names: typing.Set[str], init_mapped_attributes: bool):
5046def tstaticobject_wrap_obj(obj, attributes_dict: Dict, attributes_slots: IList, setable_data_descriptor_field_names: Set[str], init_mapped_attributes: bool):
5047    base = obj.__class__
5048    setattr(obj, '_tstaticobject_attributes_dict', attributes_dict)
5049    setattr(obj, '_tstaticobject_attributes_slots', attributes_slots)
5050    setattr(obj, '_tstaticobject_setable_data_descriptor_field_names', setable_data_descriptor_field_names)
5051    if init_mapped_attributes:
5052        object_fields = set(dir(object))
5053        obj_fields = set(dir(obj)) - object_fields
5054        good_fields: List[Hashable] = list()
5055        for key in obj_fields:
5056            value = getattr_static(obj, key)
5057            if key in {'_tstaticobject_attributes_dict', '_tstaticobject_attributes_slots', '_tstaticobject_setable_data_descriptor_field_names'} or key.startswith('__'):
5058                continue
5059
5060            if isfunction(value) or ismethod(value) or isinstance(value, FrameType) or isinstance(value, CodeType) or ismethoddescriptor(value):
5061                continue
5062
5063            if (not isclass(value)) and (hasattr(value, "__get__") and (not (hasattr(value, "__set__") or hasattr(value, "__delete__")))):
5064                continue
5065            
5066            if is_setable_data_descriptor(value):
5067                setable_data_descriptor_field_names.add(key)
5068            
5069            good_fields.append(key)
5070        
5071        good_fields_len = len(good_fields)
5072        attributes_slots.set_capacity(good_fields_len)
5073        attributes_slots.extend_with(good_fields_len, 0)
5074        for index, key in enumerate(good_fields):
5075            attributes_dict[key] = index
5076            value = getattr(obj, key)
5077            attributes_slots[index] = value
5078    
5079    NewClass = type(
5080        base.__name__ + 'WrappedByTStaticObject',
5081        (base,),
5082        {
5083            '__getattribute__': tstaticobject_custom_getattribute,
5084            '__setattr__': tstaticobject_custom_setattr,
5085            '__delattr__': tstaticobject_custom_delattr,
5086        }
5087    )
5088    obj.__class__ = NewClass
class TStaticObject:
5091class TStaticObject:
5092    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: Any) -> Tuple[Any, Offset, Size]:
5093        offset, real_size = shared_memory.malloc(ObjectType.tstaticobject, 32)
5094        created_items_offsets: List[Offset] = list()
5095        try:
5096            make_changes_inplace: bool = True
5097            if isinstance(obj, ForceStaticObjectCopy):
5098                obj = obj.obj
5099                make_changes_inplace = False
5100            elif isinstance(obj, ForceStaticObjectInplace):
5101                obj = obj.obj
5102                make_changes_inplace = True
5103
5104            dumped_obj: bytes = pickle_dumps(obj)
5105            dumped_mapped_obj, dumped_obj_offset, dumped_obj_size = shared_memory.put_obj(dumped_obj)
5106            created_items_offsets.append(dumped_obj_offset)
5107            write_uint64(shared_memory.base_address, offset + 16 + 0, dumped_obj_offset)
5108
5109            attributes_dict: Dict = dict()
5110
5111            attributes_slots, attributes_slots_offset, attributes_slots_size = shared_memory.put_obj(list())
5112            created_items_offsets.append(attributes_slots_offset)
5113            write_uint64(shared_memory.base_address, offset + 16 + 16, attributes_slots_offset)
5114            
5115            setable_data_descriptor_field_names: Set[str] = set()
5116
5117            mapped_obj = None
5118            if make_changes_inplace:
5119                tstaticobject_wrap_obj(obj, attributes_dict, attributes_slots, setable_data_descriptor_field_names, True)
5120                mapped_obj = obj
5121            else:
5122                # mapped_obj = self.init_from_shared_memory(shared_memory, offset)
5123                mapped_obj = pickle_loads(dumped_obj)
5124                tstaticobject_wrap_obj(mapped_obj, attributes_dict, attributes_slots, setable_data_descriptor_field_names, True)
5125            
5126            dumped_attributes_dict: bytes = pickle_dumps(attributes_dict)
5127            dumped_mapped_attributes_dict, dumped_attributes_dict_offset, dumped_attributes_dict_size = shared_memory.put_obj(dumped_attributes_dict)
5128            write_uint64(shared_memory.base_address, offset + 16 + 8, dumped_attributes_dict_offset)
5129            
5130            dumped_setable_data_descriptor_field_names: bytes = pickle_dumps(setable_data_descriptor_field_names)
5131            mapped_dumped_setable_data_descriptor_field_names, dumped_setable_data_descriptor_field_names_offset, dumped_setable_data_descriptor_field_names_size = shared_memory.put_obj(dumped_setable_data_descriptor_field_names)
5132            write_uint64(shared_memory.base_address, offset + 16 + 24, dumped_setable_data_descriptor_field_names_offset)
5133        except:
5134            shared_memory.free(offset)
5135            for item_offset in created_items_offsets:
5136                shared_memory.destroy_obj(item_offset)
5137            
5138            raise
5139        
5140        return mapped_obj, offset, real_size
5141    
5142    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> Any:
5143        if ObjectType.tstaticobject != read_uint64(shared_memory.base_address, offset):
5144            raise WrongObjectTypeError
5145
5146        dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
5147        dumped_obj: bytes = shared_memory.get_obj(dumped_obj_offset)
5148        obj = pickle_loads(dumped_obj)
5149
5150        attributes_slots_offset = read_uint64(shared_memory.base_address, offset + 16 + 16)
5151        attributes_slots: IList = shared_memory.get_obj(attributes_slots_offset)
5152
5153        dumped_attributes_dict_offset = read_uint64(shared_memory.base_address, offset + 16 + 8)
5154        dumped_attributes_dict = shared_memory.get_obj(dumped_attributes_dict_offset)
5155        attributes_dict = pickle_loads(dumped_attributes_dict)
5156
5157        dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + 16 + 24)
5158        dumped_setable_data_descriptor_field_names = shared_memory.get_obj(dumped_setable_data_descriptor_field_names_offset)
5159        setable_data_descriptor_field_names = pickle_loads(dumped_setable_data_descriptor_field_names)
5160
5161        tstaticobject_wrap_obj(obj, attributes_dict, attributes_slots, setable_data_descriptor_field_names, False)
5162        return obj
5163    
5164    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
5165        if ObjectType.tstaticobject != read_uint64(shared_memory.base_address, offset):
5166            raise WrongObjectTypeError
5167
5168        dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
5169        shared_memory.destroy_obj(dumped_obj_offset)
5170        attributes_slots_offset = read_uint64(shared_memory.base_address, offset + 16 + 16)
5171        shared_memory.destroy_obj(attributes_slots_offset)
5172        dumped_attributes_dict_offset = read_uint64(shared_memory.base_address, offset + 16 + 8)
5173        shared_memory.destroy_obj(dumped_attributes_dict_offset)
5174        dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + 16 + 24)
5175        shared_memory.destroy_obj(dumped_setable_data_descriptor_field_names_offset)
5176        shared_memory.free(offset)
5177    
5178    # def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
5179    #     if ObjectType.tstaticobject != read_uint64(shared_memory.base_address, offset + 0):
5180    #         raise WrongObjectTypeError
5181
5182    #     dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
5183    #     return shared_memory.get_obj_buffer(dumped_obj_offset)
5184    
5185    # def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
5186    #     if ObjectType.tstaticobject != read_uint64(shared_memory.base_address, offset + 0):
5187    #         raise WrongObjectTypeError
5188
5189
5190    #     dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
5191    #     return shared_memory.get_obj_buffer_2(dumped_obj_offset)
def map_to_shared_memory(self, shared_memory: SharedMemory, obj: typing.Any) -> Tuple[Any, int, int]:
5092    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: Any) -> Tuple[Any, Offset, Size]:
5093        offset, real_size = shared_memory.malloc(ObjectType.tstaticobject, 32)
5094        created_items_offsets: List[Offset] = list()
5095        try:
5096            make_changes_inplace: bool = True
5097            if isinstance(obj, ForceStaticObjectCopy):
5098                obj = obj.obj
5099                make_changes_inplace = False
5100            elif isinstance(obj, ForceStaticObjectInplace):
5101                obj = obj.obj
5102                make_changes_inplace = True
5103
5104            dumped_obj: bytes = pickle_dumps(obj)
5105            dumped_mapped_obj, dumped_obj_offset, dumped_obj_size = shared_memory.put_obj(dumped_obj)
5106            created_items_offsets.append(dumped_obj_offset)
5107            write_uint64(shared_memory.base_address, offset + 16 + 0, dumped_obj_offset)
5108
5109            attributes_dict: Dict = dict()
5110
5111            attributes_slots, attributes_slots_offset, attributes_slots_size = shared_memory.put_obj(list())
5112            created_items_offsets.append(attributes_slots_offset)
5113            write_uint64(shared_memory.base_address, offset + 16 + 16, attributes_slots_offset)
5114            
5115            setable_data_descriptor_field_names: Set[str] = set()
5116
5117            mapped_obj = None
5118            if make_changes_inplace:
5119                tstaticobject_wrap_obj(obj, attributes_dict, attributes_slots, setable_data_descriptor_field_names, True)
5120                mapped_obj = obj
5121            else:
5122                # mapped_obj = self.init_from_shared_memory(shared_memory, offset)
5123                mapped_obj = pickle_loads(dumped_obj)
5124                tstaticobject_wrap_obj(mapped_obj, attributes_dict, attributes_slots, setable_data_descriptor_field_names, True)
5125            
5126            dumped_attributes_dict: bytes = pickle_dumps(attributes_dict)
5127            dumped_mapped_attributes_dict, dumped_attributes_dict_offset, dumped_attributes_dict_size = shared_memory.put_obj(dumped_attributes_dict)
5128            write_uint64(shared_memory.base_address, offset + 16 + 8, dumped_attributes_dict_offset)
5129            
5130            dumped_setable_data_descriptor_field_names: bytes = pickle_dumps(setable_data_descriptor_field_names)
5131            mapped_dumped_setable_data_descriptor_field_names, dumped_setable_data_descriptor_field_names_offset, dumped_setable_data_descriptor_field_names_size = shared_memory.put_obj(dumped_setable_data_descriptor_field_names)
5132            write_uint64(shared_memory.base_address, offset + 16 + 24, dumped_setable_data_descriptor_field_names_offset)
5133        except:
5134            shared_memory.free(offset)
5135            for item_offset in created_items_offsets:
5136                shared_memory.destroy_obj(item_offset)
5137            
5138            raise
5139        
5140        return mapped_obj, offset, real_size
def init_from_shared_memory(self, shared_memory: SharedMemory, offset: int) -> Any:
5142    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> Any:
5143        if ObjectType.tstaticobject != read_uint64(shared_memory.base_address, offset):
5144            raise WrongObjectTypeError
5145
5146        dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
5147        dumped_obj: bytes = shared_memory.get_obj(dumped_obj_offset)
5148        obj = pickle_loads(dumped_obj)
5149
5150        attributes_slots_offset = read_uint64(shared_memory.base_address, offset + 16 + 16)
5151        attributes_slots: IList = shared_memory.get_obj(attributes_slots_offset)
5152
5153        dumped_attributes_dict_offset = read_uint64(shared_memory.base_address, offset + 16 + 8)
5154        dumped_attributes_dict = shared_memory.get_obj(dumped_attributes_dict_offset)
5155        attributes_dict = pickle_loads(dumped_attributes_dict)
5156
5157        dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + 16 + 24)
5158        dumped_setable_data_descriptor_field_names = shared_memory.get_obj(dumped_setable_data_descriptor_field_names_offset)
5159        setable_data_descriptor_field_names = pickle_loads(dumped_setable_data_descriptor_field_names)
5160
5161        tstaticobject_wrap_obj(obj, attributes_dict, attributes_slots, setable_data_descriptor_field_names, False)
5162        return obj
def destroy(self, shared_memory: SharedMemory, offset: int) -> None:
5164    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
5165        if ObjectType.tstaticobject != read_uint64(shared_memory.base_address, offset):
5166            raise WrongObjectTypeError
5167
5168        dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
5169        shared_memory.destroy_obj(dumped_obj_offset)
5170        attributes_slots_offset = read_uint64(shared_memory.base_address, offset + 16 + 16)
5171        shared_memory.destroy_obj(attributes_slots_offset)
5172        dumped_attributes_dict_offset = read_uint64(shared_memory.base_address, offset + 16 + 8)
5173        shared_memory.destroy_obj(dumped_attributes_dict_offset)
5174        dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + 16 + 24)
5175        shared_memory.destroy_obj(dumped_setable_data_descriptor_field_names_offset)
5176        shared_memory.free(offset)
class StaticObjectWithSlotsOffsets(enum.IntEnum):
5198class StaticObjectWithSlotsOffsets(IntEnum):
5199    pickled_obj = 0
5200    pickled_attributes_dict = 1
5201    attributes_slots = 2
5202    setable_data_descriptor_field_names = 3

Slot indices of the pointer fields stored in a slots-based static object's shared-memory record: the pickled object, the pickled attributes dict, the attributes slots list, and the set of setable data-descriptor field names.

pickled_obj = <StaticObjectWithSlotsOffsets.pickled_obj: 0>
pickled_attributes_dict = <StaticObjectWithSlotsOffsets.pickled_attributes_dict: 1>
attributes_slots = <StaticObjectWithSlotsOffsets.attributes_slots: 2>
setable_data_descriptor_field_names = <StaticObjectWithSlotsOffsets.setable_data_descriptor_field_names: 3>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
def tstaticobjectwithslots_custom_getattribute(self, name):
5205def tstaticobjectwithslots_custom_getattribute(self, name):
5206    if name in {'_tstaticobjectwithslots_attributes_dict', '_tstaticobjectwithslots_attributes_slots', '_tstaticobjectwithslots_setable_data_descriptor_field_names'} or name.startswith('__'):
5207        return object.__getattribute__(self, name)
5208    
5209    try:
5210        return self._tstaticobjectwithslots_attributes_slots[self._tstaticobjectwithslots_attributes_dict[name]]
5211    except KeyError:
5212        pass
5213    
5214    return object.__getattribute__(self, name)
def tstaticobjectwithslots_custom_setattr(self, name, value):
5217def tstaticobjectwithslots_custom_setattr(self, name, value):
5218    if name in {'_tstaticobjectwithslots_attributes_dict', '_tstaticobjectwithslots_attributes_slots', '_tstaticobjectwithslots_setable_data_descriptor_field_names'} or name.startswith('__'):
5219        object.__setattr__(self, name, value)
5220    else:
5221        if isfunction(value) or ismethod(value) or isinstance(value, FrameType) or isinstance(value, CodeType) or ismethoddescriptor(value):
5222            object.__setattr__(self, name, value)
5223            return
5224        
5225        # try:
5226        #     if name in self._tstaticobjectwithslots_setable_data_descriptor_field_names:
5227        #         object.__setattr__(self, name, value)
5228        # except AttributeError:
5229        #     pass
5230        
5231        try:
5232            self._tstaticobjectwithslots_attributes_slots[self._tstaticobjectwithslots_attributes_dict[name]] = value
5233            return
5234        except KeyError:
5235            pass
5236            
5237        object.__setattr__(self, name, value)
def tstaticobjectwithslots_custom_delattr(self, name):
5240def tstaticobjectwithslots_custom_delattr(self, name):
5241    if name in {'_tstaticobjectwithslots_attributes_dict', '_tstaticobjectwithslots_attributes_slots', '_tstaticobjectwithslots_setable_data_descriptor_field_names'} or name.startswith('__'):
5242        object.__delattr__(self, name)
5243    else:
5244        if name in self._tstaticobjectwithslots_attributes_dict:
5245            raise AttributeError(f"'{type(self).__name__}' object attribute '{name}' is read-only")
5246        else:
5247            object.__delattr__(self, name)
def tstaticobjectwithslots_custom_init(self, original, good_fields, attributes_dict, attributes_slots, setable_data_descriptor_field_names):
5250def tstaticobjectwithslots_custom_init(self, original, good_fields, attributes_dict, attributes_slots, setable_data_descriptor_field_names):
5251    setattr(self, '_tstaticobjectwithslots_attributes_dict', attributes_dict)
5252    setattr(self, '_tstaticobjectwithslots_attributes_slots', attributes_slots)
5253    setattr(self, '_tstaticobjectwithslots_setable_data_descriptor_field_names', setable_data_descriptor_field_names)
5254    for attr_name in good_fields:
5255        setattr(self, attr_name, getattr(original, attr_name))
def tstaticobjectwithslots_custom_eq(self, other):
5258def tstaticobjectwithslots_custom_eq(self, other):
5259    parent_class = self.__class__.__bases__[0]
5260    if not isinstance(other, (type(self), parent_class)):
5261        return NotImplemented
5262
5263    for key in self._tstaticobjectwithslots_attributes_dict.keys():
5264        if not hasattr(other, key):
5265            return False
5266        
5267        if getattr(self, key) != getattr(other, key):
5268            return False
5269    
5270    return True
# Factory for the shared-memory-backed wrapper class.
# (pdoc dump artifact: the plain signature line below is followed by the
# line-numbered rendering of the same function.)
def tstaticobjectwithslots_wrap_obj( obj, attributes_dict: typing.Dict, attributes_slots: IList, setable_data_descriptor_field_names: typing.Set[str], init_mapped_attributes: bool) -> Any:
5273def tstaticobjectwithslots_wrap_obj(obj, attributes_dict: Dict, attributes_slots: IList, setable_data_descriptor_field_names: Set[str], init_mapped_attributes: bool) -> Any:
5274    base = obj.__class__
5275
# When init_mapped_attributes is True (creator side): scan obj's attributes,
# decide which are mappable ("good fields"), and populate attributes_dict
# (name -> slot index) and attributes_slots (index -> value).
5276    good_fields: List[Hashable] = list()
5277    if init_mapped_attributes:
5278        if hasattr(base, '__slots__'):
5279            obj_fields = base.__slots__
5280        else:
# For a plain __dict__ class, consider every name dir() reports beyond what
# bare `object` already has.
5281            object_fields = set(dir(object))
5282            obj_fields = set(dir(obj)) - object_fields
5283
5284        for key in obj_fields:
# getattr_static avoids triggering descriptors/__getattr__ during inspection.
5285            value = getattr_static(obj, key)
# Skip the wrapper's own bookkeeping names and all dunder names.
5286            if key in {'_tstaticobjectwithslots_attributes_dict', '_tstaticobjectwithslots_attributes_slots', '_tstaticobjectwithslots_setable_data_descriptor_field_names'} or key.startswith('__'):
5287                continue
5288
# Skip callables, code/frame objects and method descriptors: only data fields
# are mapped.
5289            if isfunction(value) or ismethod(value) or isinstance(value, FrameType) or isinstance(value, CodeType) or ismethoddescriptor(value):
5290                continue
5291
# Skip non-data descriptors (have __get__ but neither __set__ nor __delete__).
5292            if (not isclass(value)) and (hasattr(value, "__get__") and (not (hasattr(value, "__set__") or hasattr(value, "__delete__")))):
5293                continue
5294            
5295            if is_setable_data_descriptor(value):
5296                setable_data_descriptor_field_names.add(key)
5297            
5298            good_fields.append(key)
5299        
# Size the shared slot list once, then fill the name->index map and values.
5300        good_fields_len = len(good_fields)
5301        attributes_slots.set_capacity(good_fields_len)
5302        attributes_slots.extend_with(good_fields_len, 0)
5303        for index, key in enumerate(good_fields):
5304            attributes_dict[key] = index
5305            value = getattr(obj, key)
5306            attributes_slots[index] = value
5307    
# Build a dynamic subclass of obj's class whose attribute dunders redirect
# mapped attributes into attributes_slots (the custom_* functions above).
5308    NewClass = type(
5309        base.__name__ + 'WrappedByTStaticObjectWithSlots',
5310        (base,),
5311        {
5312            '__slots__': ['__dict__'],
5313            '__init__': tstaticobjectwithslots_custom_init,
5314            '__eq__': tstaticobjectwithslots_custom_eq,
5315            '__getattribute__': tstaticobjectwithslots_custom_getattribute,
5316            '__setattr__': tstaticobjectwithslots_custom_setattr,
5317            '__delattr__': tstaticobjectwithslots_custom_delattr,
5318        }
5319    )
5320
5321    new_obj = NewClass(obj, good_fields, attributes_dict, attributes_slots, setable_data_descriptor_field_names)
5322    
5323    return new_obj
# Codec for ObjectType.tstaticobjectwithslots records.  Record layout (as the
# reads/writes below show): uint64 type tag at +0, 8-byte payload slots
# starting at +16, indexed by StaticObjectWithSlotsOffsets.
class TStaticObjectWithSlots:
5326class TStaticObjectWithSlots:
# Serialize `obj` into shared memory; returns (wrapped_obj, offset, real_size).
5327    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: Any) -> Tuple[Any, Offset, Size]:
5328        offset, real_size = shared_memory.malloc(ObjectType.tstaticobjectwithslots, 8 * len(StaticObjectWithSlotsOffsets))
5329        created_items_offsets: List[Offset] = list()
5330        try:
# Full pickled snapshot of the object.
5331            dumped_obj: bytes = pickle_dumps(obj)
5332            dumped_mapped_obj, dumped_obj_offset, dumped_obj_size = shared_memory.put_obj(dumped_obj)
5333            created_items_offsets.append(dumped_obj_offset)
5334            write_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.pickled_obj, dumped_obj_offset)
5335
5336            attributes_dict: Dict = dict()
5337
# Shared list that will hold the live mapped attribute values.
5338            attributes_slots, attributes_slots_offset, attributes_slots_size = shared_memory.put_obj(list())
5339            created_items_offsets.append(attributes_slots_offset)
5340            write_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.attributes_slots, attributes_slots_offset)
5341            
5342            setable_data_descriptor_field_names: Set[str] = set()
5343
# Wrap a freshly unpickled copy (not `obj` itself), so the caller's original
# object is left untouched; this call also fills attributes_dict/slots.
5344            mapped_obj = None
5345            loaded_obj = pickle_loads(dumped_obj)
5346            mapped_obj = tstaticobjectwithslots_wrap_obj(loaded_obj, attributes_dict, attributes_slots, setable_data_descriptor_field_names, True)
5347            
5348            dumped_attributes_dict: bytes = pickle_dumps(attributes_dict)
# NOTE(review): the next two put_obj offsets are written into the record but
# never appended to created_items_offsets, so if a later statement in this
# try block raises they are not cleaned up — confirm this leak is acceptable.
5349            dumped_mapped_attributes_dict, dumped_attributes_dict_offset, dumped_attributes_dict_size = shared_memory.put_obj(dumped_attributes_dict)
5350            write_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.pickled_attributes_dict, dumped_attributes_dict_offset)
5351            
5352            dumped_setable_data_descriptor_field_names: bytes = pickle_dumps(setable_data_descriptor_field_names)
5353            mapped_dumped_setable_data_descriptor_field_names, dumped_setable_data_descriptor_field_names_offset, dumped_setable_data_descriptor_field_names_size = shared_memory.put_obj(dumped_setable_data_descriptor_field_names)
5354            write_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.setable_data_descriptor_field_names, dumped_setable_data_descriptor_field_names_offset)
# Bare except is deliberate here: undo allocations, then re-raise unchanged.
5355        except:
5356            shared_memory.free(offset)
5357            for item_offset in created_items_offsets:
5358                shared_memory.destroy_obj(item_offset)
5359            
5360            raise
5361        
5362        return mapped_obj, offset, real_size
5363    
# Rebuild the wrapped object (consumer side) from an existing record.
5364    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> Any:
# First uint64 of every record is its ObjectType tag.
5365        if ObjectType.tstaticobjectwithslots != read_uint64(shared_memory.base_address, offset):
5366            raise WrongObjectTypeError
5367
5368        dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.pickled_obj)
5369        dumped_obj: bytes = shared_memory.get_obj(dumped_obj_offset)
5370        obj = pickle_loads(dumped_obj)
5371
5372        attributes_slots_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.attributes_slots)
5373        attributes_slots: IList = shared_memory.get_obj(attributes_slots_offset)
5374
5375        dumped_attributes_dict_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.pickled_attributes_dict)
5376        dumped_attributes_dict = shared_memory.get_obj(dumped_attributes_dict_offset)
5377        attributes_dict = pickle_loads(dumped_attributes_dict)
5378
5379        dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.setable_data_descriptor_field_names)
5380        dumped_setable_data_descriptor_field_names = shared_memory.get_obj(dumped_setable_data_descriptor_field_names_offset)
5381        setable_data_descriptor_field_names = pickle_loads(dumped_setable_data_descriptor_field_names)
5382
# init_mapped_attributes=False: reuse the already-populated shared containers.
5383        mapped_obj = tstaticobjectwithslots_wrap_obj(obj, attributes_dict, attributes_slots, setable_data_descriptor_field_names, False)
5384        return mapped_obj
5385    
# Release the record and all four sub-objects it references.
5386    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
5387        if ObjectType.tstaticobjectwithslots != read_uint64(shared_memory.base_address, offset):
5388            raise WrongObjectTypeError
5389
5390        dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.pickled_obj)
5391        shared_memory.destroy_obj(dumped_obj_offset)
5392        attributes_slots_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.attributes_slots)
5393        shared_memory.destroy_obj(attributes_slots_offset)
5394        dumped_attributes_dict_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.pickled_attributes_dict)
5395        shared_memory.destroy_obj(dumped_attributes_dict_offset)
5396        dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.setable_data_descriptor_field_names)
5397        shared_memory.destroy_obj(dumped_setable_data_descriptor_field_names_offset)
5398        shared_memory.free(offset)
5399    
5400    # def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
5401    #     if ObjectType.tstaticobjectwithslots != read_uint64(shared_memory.base_address, offset + 0):
5402    #         raise WrongObjectTypeError

5404    #     dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.pickled_obj)
5405    #     return shared_memory.get_obj_buffer(dumped_obj_offset)
5406    
5407    # def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
5408    #     if ObjectType.tstaticobjectwithslots != read_uint64(shared_memory.base_address, offset + 0):
5409    #         raise WrongObjectTypeError

5411
5412    #     dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.pickled_obj)
5413    #     return shared_memory.get_obj_buffer_2(dumped_obj_offset)
# NOTE(review): pdoc artifact — the three renderings below duplicate
# TStaticObjectWithSlots.map_to_shared_memory / init_from_shared_memory /
# destroy, which appear in full in the class rendering above.
def map_to_shared_memory( self, shared_memory: SharedMemory, obj: typing.Any) -> Tuple[Any, int, int]:
5327    def map_to_shared_memory(self, shared_memory: 'SharedMemory', obj: Any) -> Tuple[Any, Offset, Size]:
5328        offset, real_size = shared_memory.malloc(ObjectType.tstaticobjectwithslots, 8 * len(StaticObjectWithSlotsOffsets))
5329        created_items_offsets: List[Offset] = list()
5330        try:
5331            dumped_obj: bytes = pickle_dumps(obj)
5332            dumped_mapped_obj, dumped_obj_offset, dumped_obj_size = shared_memory.put_obj(dumped_obj)
5333            created_items_offsets.append(dumped_obj_offset)
5334            write_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.pickled_obj, dumped_obj_offset)
5335
5336            attributes_dict: Dict = dict()
5337
5338            attributes_slots, attributes_slots_offset, attributes_slots_size = shared_memory.put_obj(list())
5339            created_items_offsets.append(attributes_slots_offset)
5340            write_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.attributes_slots, attributes_slots_offset)
5341            
5342            setable_data_descriptor_field_names: Set[str] = set()
5343
5344            mapped_obj = None
5345            loaded_obj = pickle_loads(dumped_obj)
5346            mapped_obj = tstaticobjectwithslots_wrap_obj(loaded_obj, attributes_dict, attributes_slots, setable_data_descriptor_field_names, True)
5347            
5348            dumped_attributes_dict: bytes = pickle_dumps(attributes_dict)
5349            dumped_mapped_attributes_dict, dumped_attributes_dict_offset, dumped_attributes_dict_size = shared_memory.put_obj(dumped_attributes_dict)
5350            write_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.pickled_attributes_dict, dumped_attributes_dict_offset)
5351            
5352            dumped_setable_data_descriptor_field_names: bytes = pickle_dumps(setable_data_descriptor_field_names)
5353            mapped_dumped_setable_data_descriptor_field_names, dumped_setable_data_descriptor_field_names_offset, dumped_setable_data_descriptor_field_names_size = shared_memory.put_obj(dumped_setable_data_descriptor_field_names)
5354            write_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.setable_data_descriptor_field_names, dumped_setable_data_descriptor_field_names_offset)
5355        except:
5356            shared_memory.free(offset)
5357            for item_offset in created_items_offsets:
5358                shared_memory.destroy_obj(item_offset)
5359            
5360            raise
5361        
5362        return mapped_obj, offset, real_size
# Duplicate rendering of TStaticObjectWithSlots.init_from_shared_memory.
def init_from_shared_memory( self, shared_memory: SharedMemory, offset: int) -> Any:
5364    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> Any:
5365        if ObjectType.tstaticobjectwithslots != read_uint64(shared_memory.base_address, offset):
5366            raise WrongObjectTypeError
5367
5368        dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.pickled_obj)
5369        dumped_obj: bytes = shared_memory.get_obj(dumped_obj_offset)
5370        obj = pickle_loads(dumped_obj)
5371
5372        attributes_slots_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.attributes_slots)
5373        attributes_slots: IList = shared_memory.get_obj(attributes_slots_offset)
5374
5375        dumped_attributes_dict_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.pickled_attributes_dict)
5376        dumped_attributes_dict = shared_memory.get_obj(dumped_attributes_dict_offset)
5377        attributes_dict = pickle_loads(dumped_attributes_dict)
5378
5379        dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.setable_data_descriptor_field_names)
5380        dumped_setable_data_descriptor_field_names = shared_memory.get_obj(dumped_setable_data_descriptor_field_names_offset)
5381        setable_data_descriptor_field_names = pickle_loads(dumped_setable_data_descriptor_field_names)
5382
5383        mapped_obj = tstaticobjectwithslots_wrap_obj(obj, attributes_dict, attributes_slots, setable_data_descriptor_field_names, False)
5384        return mapped_obj
# Duplicate rendering of TStaticObjectWithSlots.destroy.
def destroy( self, shared_memory: SharedMemory, offset: int) -> None:
5386    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
5387        if ObjectType.tstaticobjectwithslots != read_uint64(shared_memory.base_address, offset):
5388            raise WrongObjectTypeError
5389
5390        dumped_obj_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.pickled_obj)
5391        shared_memory.destroy_obj(dumped_obj_offset)
5392        attributes_slots_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.attributes_slots)
5393        shared_memory.destroy_obj(attributes_slots_offset)
5394        dumped_attributes_dict_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.pickled_attributes_dict)
5395        shared_memory.destroy_obj(dumped_attributes_dict_offset)
5396        dumped_setable_data_descriptor_field_names_offset = read_uint64(shared_memory.base_address, offset + 16 + 8 * StaticObjectWithSlotsOffsets.setable_data_descriptor_field_names)
5397        shared_memory.destroy_obj(dumped_setable_data_descriptor_field_names_offset)
5398        shared_memory.free(offset)
class TNumpyNdarrayOffsets(IntEnum):
    """Payload slot indices of a shared-memory ``numpy.ndarray`` record.

    Users multiply these indices by 8 (uint64 slots) when addressing the
    record's payload area.
    """

    data_buffer_offset = 0
    shape_tuple_offset = 1
    pickled_datatype_offset = 2

An enumeration.

data_buffer_offset = <TNumpyNdarrayOffsets.data_buffer_offset: 0>
shape_tuple_offset = <TNumpyNdarrayOffsets.shape_tuple_offset: 1>
pickled_datatype_offset = <TNumpyNdarrayOffsets.pickled_datatype_offset: 2>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
# Codec for ObjectType.tnumpyndarray records: raw data bytes + shape tuple +
# pickled dtype, with three uint64 offsets stored at +16/+24/+32.
class TNumpyNdarray:
5426class TNumpyNdarray:
# Copy `nparray` into shared memory; returns (mapped_view, offset, real_size).
5427    def map_to_shared_memory(self, shared_memory: 'SharedMemory', nparray: np.ndarray) -> Tuple[np.ndarray, Offset, Size]:
5428        shape = tuple(nparray.shape)
5429        data_type = nparray.dtype
5430        pickled_data_type = pickle_dumps(data_type)
# tobytes() copies; the mapped array below is rebuilt over the shared buffer.
5431        data_buffer: bytes = nparray.tobytes()
5432        offset, real_size = shared_memory.malloc(ObjectType.tnumpyndarray, 24)
5433        created_items_offsets: List[Offset] = list()
5434        try:
5435            data_buffer_mapped_obj, data_buffer_offset, data_buffer_size = shared_memory.put_obj(data_buffer)
5436            created_items_offsets.append(data_buffer_offset)
5437            shape_mapped_obj, shape_offset, shape_size = shared_memory.put_obj(shape)
5438            created_items_offsets.append(shape_offset)
# NOTE(review): pickled_data_type_offset is not appended to
# created_items_offsets, so it leaks if a later statement raises — confirm.
5439            pickled_data_type_mapped_obj, pickled_data_type_offset, pickled_data_type_size = shared_memory.put_obj(pickled_data_type)
5440            write_uint64(shared_memory.base_address, offset + 16 + 0, data_buffer_offset)
5441            write_uint64(shared_memory.base_address, offset + 16 + 8, shape_offset)
5442            write_uint64(shared_memory.base_address, offset + 16 + 16, pickled_data_type_offset)
5443            mapped_nparray: np.ndarray = make_numpy_array_from_obj_offset(shared_memory, data_buffer_offset, shape, data_type)
# Bare except is deliberate: undo allocations, then re-raise unchanged.
5444        except:
5445            shared_memory.free(offset)
5446            for item_offset in created_items_offsets:
5447                shared_memory.destroy_obj(item_offset)
5448            
5449            raise
5450
5451        return mapped_nparray, offset, real_size
5452    
# Rebuild an ndarray view over the shared data buffer of an existing record.
5453    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> np.ndarray:
5454        if ObjectType.tnumpyndarray != read_uint64(shared_memory.base_address, offset):
5455            raise WrongObjectTypeError
5456
5457        data_buffer_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
5458        shape_offset = read_uint64(shared_memory.base_address, offset + 16 + 8)
5459        pickled_data_type_offset = read_uint64(shared_memory.base_address, offset + 16 + 16)
5460        shape = shared_memory.get_obj(shape_offset)
5461        pickled_data_type = shared_memory.get_obj(pickled_data_type_offset)
5462        data_type = pickle_loads(pickled_data_type)
5463        mapped_nparray: np.ndarray = make_numpy_array_from_obj_offset(shared_memory, data_buffer_offset, shape, data_type)
5464        return mapped_nparray
5465    
# Release the record and its three sub-objects.
5466    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
5467        if ObjectType.tnumpyndarray != read_uint64(shared_memory.base_address, offset):
5468            raise WrongObjectTypeError
5469
5470        data_buffer_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
5471        shape_offset = read_uint64(shared_memory.base_address, offset + 16 + 8)
5472        pickled_data_type_offset = read_uint64(shared_memory.base_address, offset + 16 + 16)
5473        shared_memory.destroy_obj(data_buffer_offset)
5474        shared_memory.destroy_obj(shape_offset)
5475        shared_memory.destroy_obj(pickled_data_type_offset)
5476        shared_memory.free(offset)
5477    
# Memoryview over the raw data buffer (no type/shape reconstruction).
5478    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
5479        if ObjectType.tnumpyndarray != read_uint64(shared_memory.base_address, offset + 0):
5480            raise WrongObjectTypeError
5481
5482        data_buffer_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
5483        return shared_memory.get_obj_buffer(data_buffer_offset)
5484    
# Pair of ints from SharedMemory.get_obj_buffer_2 describing the data buffer.
5485    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
5486        if ObjectType.tnumpyndarray != read_uint64(shared_memory.base_address, offset + 0):
5487            raise WrongObjectTypeError
5488
5489
5490        data_buffer_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
5491        return shared_memory.get_obj_buffer_2(data_buffer_offset)
# NOTE(review): pdoc artifact — the renderings below duplicate the TNumpyNdarray
# methods that appear in full in the class rendering above.
def map_to_shared_memory( self, shared_memory: SharedMemory, nparray: numpy.ndarray) -> Tuple[numpy.ndarray, int, int]:
5427    def map_to_shared_memory(self, shared_memory: 'SharedMemory', nparray: np.ndarray) -> Tuple[np.ndarray, Offset, Size]:
5428        shape = tuple(nparray.shape)
5429        data_type = nparray.dtype
5430        pickled_data_type = pickle_dumps(data_type)
5431        data_buffer: bytes = nparray.tobytes()
5432        offset, real_size = shared_memory.malloc(ObjectType.tnumpyndarray, 24)
5433        created_items_offsets: List[Offset] = list()
5434        try:
5435            data_buffer_mapped_obj, data_buffer_offset, data_buffer_size = shared_memory.put_obj(data_buffer)
5436            created_items_offsets.append(data_buffer_offset)
5437            shape_mapped_obj, shape_offset, shape_size = shared_memory.put_obj(shape)
5438            created_items_offsets.append(shape_offset)
5439            pickled_data_type_mapped_obj, pickled_data_type_offset, pickled_data_type_size = shared_memory.put_obj(pickled_data_type)
5440            write_uint64(shared_memory.base_address, offset + 16 + 0, data_buffer_offset)
5441            write_uint64(shared_memory.base_address, offset + 16 + 8, shape_offset)
5442            write_uint64(shared_memory.base_address, offset + 16 + 16, pickled_data_type_offset)
5443            mapped_nparray: np.ndarray = make_numpy_array_from_obj_offset(shared_memory, data_buffer_offset, shape, data_type)
5444        except:
5445            shared_memory.free(offset)
5446            for item_offset in created_items_offsets:
5447                shared_memory.destroy_obj(item_offset)
5448            
5449            raise
5450
5451        return mapped_nparray, offset, real_size
# Duplicate rendering of TNumpyNdarray.init_from_shared_memory.
def init_from_shared_memory( self, shared_memory: SharedMemory, offset: int) -> dict:
5453    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> dict:
5454        if ObjectType.tnumpyndarray != read_uint64(shared_memory.base_address, offset):
5455            raise WrongObjectTypeError
5456
5457        data_buffer_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
5458        shape_offset = read_uint64(shared_memory.base_address, offset + 16 + 8)
5459        pickled_data_type_offset = read_uint64(shared_memory.base_address, offset + 16 + 16)
5460        shape = shared_memory.get_obj(shape_offset)
5461        pickled_data_type = shared_memory.get_obj(pickled_data_type_offset)
5462        data_type = pickle_loads(pickled_data_type)
5463        mapped_nparray: np.ndarray = make_numpy_array_from_obj_offset(shared_memory, data_buffer_offset, shape, data_type)
5464        return mapped_nparray
# Duplicate rendering of TNumpyNdarray.destroy.
def destroy( self, shared_memory: SharedMemory, offset: int) -> None:
5466    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
5467        if ObjectType.tnumpyndarray != read_uint64(shared_memory.base_address, offset):
5468            raise WrongObjectTypeError
5469
5470        data_buffer_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
5471        shape_offset = read_uint64(shared_memory.base_address, offset + 16 + 8)
5472        pickled_data_type_offset = read_uint64(shared_memory.base_address, offset + 16 + 16)
5473        shared_memory.destroy_obj(data_buffer_offset)
5474        shared_memory.destroy_obj(shape_offset)
5475        shared_memory.destroy_obj(pickled_data_type_offset)
5476        shared_memory.free(offset)
# Duplicate rendering of TNumpyNdarray.buffer.
def buffer( self, shared_memory: SharedMemory, offset: int) -> memoryview:
5478    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
5479        if ObjectType.tnumpyndarray != read_uint64(shared_memory.base_address, offset + 0):
5480            raise WrongObjectTypeError
5481
5482        data_buffer_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
5483        return shared_memory.get_obj_buffer(data_buffer_offset)
# Duplicate rendering of TNumpyNdarray.buffer_2.
def buffer_2( self, shared_memory: SharedMemory, offset: int) -> Tuple[int, int]:
5485    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
5486        if ObjectType.tnumpyndarray != read_uint64(shared_memory.base_address, offset + 0):
5487            raise WrongObjectTypeError
5488

5489
5490        data_buffer_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
5491        return shared_memory.get_obj_buffer_2(data_buffer_offset)
class TTorchTensorOffsets(IntEnum):
    """Payload slot index of a shared-memory torch tensor record.

    A tensor record stores only the offset of its backing numpy.ndarray record.
    """

    numpy_ndarray_offset = 0

An enumeration.

numpy_ndarray_offset = <TTorchTensorOffsets.numpy_ndarray_offset: 0>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
# Codec for ObjectType.ttorchtensor records: the tensor is stored as a mapped
# numpy.ndarray record whose offset is the single uint64 payload slot at +16.
class TTorchTensor:
5502class TTorchTensor:
# Copy `tensor` into shared memory; returns (mapped_tensor, offset, real_size).
5503    def map_to_shared_memory(self, shared_memory: 'SharedMemory', tensor: Tensor) -> Tuple[Tensor, Offset, Size]:
5504        offset, real_size = shared_memory.malloc(ObjectType.ttorchtensor, 8)
5505        created_items_offsets: List[Offset] = list()
5506        try:
5507            numpy_ndarray_mapped_obj, numpy_ndarray_offset, numpy_ndarray_size = shared_memory.put_obj(tensor.numpy())
5508            created_items_offsets.append(numpy_ndarray_offset)
5509            write_uint64(shared_memory.base_address, offset + 16 + 0, numpy_ndarray_offset)
# from_numpy shares the mapped ndarray's memory with the returned tensor.
5510            mapped_torch_tensor: Tensor = from_numpy(numpy_ndarray_mapped_obj)
5511        except:
# NOTE(review): `_offset` is not used anywhere else in this class — this
# assignment looks like a copy-paste leftover from another codec; confirm.
5512            self._offset = None
5513            shared_memory.free(offset)
5514            for item_offset in created_items_offsets:
5515                shared_memory.destroy_obj(item_offset)
5516            
5517            raise
5518        return mapped_torch_tensor, offset, real_size
5519    
# Rebuild a tensor over the shared ndarray of an existing record.
5520    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> Tensor:
5521        if ObjectType.ttorchtensor != read_uint64(shared_memory.base_address, offset):
5522            raise WrongObjectTypeError
5523
5524        numpy_ndarray_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
5525        numpy_ndarray_mapped_obj: np.ndarray = shared_memory.get_obj(numpy_ndarray_offset)
5526        mapped_torch_tensor: Tensor = from_numpy(numpy_ndarray_mapped_obj)
5527        return mapped_torch_tensor
5528    
# Release the record and its backing ndarray record.
5529    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
5530        if ObjectType.ttorchtensor != read_uint64(shared_memory.base_address, offset):
5531            raise WrongObjectTypeError
5532
5533        numpy_ndarray_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
5534        shared_memory.destroy_obj(numpy_ndarray_offset)
5535        shared_memory.free(offset)
5536    
# Memoryview over the backing ndarray record's buffer.
5537    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
5538        if ObjectType.ttorchtensor != read_uint64(shared_memory.base_address, offset + 0):
5539            raise WrongObjectTypeError
5540
5541        numpy_ndarray_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
5542        return shared_memory.get_obj_buffer(numpy_ndarray_offset)
5543    
# Pair of ints from SharedMemory.get_obj_buffer_2 describing the buffer.
5544    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
5545        if ObjectType.ttorchtensor != read_uint64(shared_memory.base_address, offset + 0):
5546            raise WrongObjectTypeError
5547

5548
5549        numpy_ndarray_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
5550        return shared_memory.get_obj_buffer_2(numpy_ndarray_offset)
# NOTE(review): pdoc artifact — the renderings below duplicate the TTorchTensor
# methods that appear in full in the class rendering above.
def map_to_shared_memory( self, shared_memory: SharedMemory, tensor: Tensor) -> Tuple[Tensor, int, int]:
5503    def map_to_shared_memory(self, shared_memory: 'SharedMemory', tensor: Tensor) -> Tuple[Tensor, Offset, Size]:
5504        offset, real_size = shared_memory.malloc(ObjectType.ttorchtensor, 8)
5505        created_items_offsets: List[Offset] = list()
5506        try:
5507            numpy_ndarray_mapped_obj, numpy_ndarray_offset, numpy_ndarray_size = shared_memory.put_obj(tensor.numpy())
5508            created_items_offsets.append(numpy_ndarray_offset)
5509            write_uint64(shared_memory.base_address, offset + 16 + 0, numpy_ndarray_offset)
5510            mapped_torch_tensor: Tensor = from_numpy(numpy_ndarray_mapped_obj)
5511        except:
5512            self._offset = None
5513            shared_memory.free(offset)
5514            for item_offset in created_items_offsets:
5515                shared_memory.destroy_obj(item_offset)
5516            
5517            raise
5518        return mapped_torch_tensor, offset, real_size
# Duplicate rendering of TTorchTensor.init_from_shared_memory.
def init_from_shared_memory( self, shared_memory: SharedMemory, offset: int) -> dict:
5520    def init_from_shared_memory(self, shared_memory: 'SharedMemory', offset: Offset) -> dict:
5521        if ObjectType.ttorchtensor != read_uint64(shared_memory.base_address, offset):
5522            raise WrongObjectTypeError
5523
5524        numpy_ndarray_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
5525        numpy_ndarray_mapped_obj: np.ndarray = shared_memory.get_obj(numpy_ndarray_offset)
5526        mapped_torch_tensor: Tensor = from_numpy(numpy_ndarray_mapped_obj)
5527        return mapped_torch_tensor
# Duplicate rendering of TTorchTensor.destroy.
def destroy( self, shared_memory: SharedMemory, offset: int) -> None:
5529    def destroy(self, shared_memory: 'SharedMemory', offset: Offset) -> None:
5530        if ObjectType.ttorchtensor != read_uint64(shared_memory.base_address, offset):
5531            raise WrongObjectTypeError
5532
5533        numpy_ndarray_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
5534        shared_memory.destroy_obj(numpy_ndarray_offset)
5535        shared_memory.free(offset)
# Duplicate rendering of TTorchTensor.buffer.
def buffer( self, shared_memory: SharedMemory, offset: int) -> memoryview:
5537    def buffer(self, shared_memory: 'SharedMemory', offset: Offset) -> memoryview:
5538        if ObjectType.ttorchtensor != read_uint64(shared_memory.base_address, offset + 0):
5539            raise WrongObjectTypeError
5540
5541        numpy_ndarray_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
5542        return shared_memory.get_obj_buffer(numpy_ndarray_offset)
# Duplicate rendering of TTorchTensor.buffer_2.
def buffer_2( self, shared_memory: SharedMemory, offset: int) -> Tuple[int, int]:
5544    def buffer_2(self, shared_memory: 'SharedMemory', offset: Offset) -> Tuple[int, int]:
5545        if ObjectType.ttorchtensor != read_uint64(shared_memory.base_address, offset + 0):
5546            raise WrongObjectTypeError
5547

5548
5549        numpy_ndarray_offset = read_uint64(shared_memory.base_address, offset + 16 + 0)
5550        return shared_memory.get_obj_buffer_2(numpy_ndarray_offset)
codec_by_type: Dict[ObjectType, TBase] = {<ObjectType.tnone: 2>: <TNone object>, <ObjectType.tint: 4>: <TInt object>, <ObjectType.tbool: 3>: <TBool object>, <ObjectType.tfloat: 5>: <TFloat object>, <ObjectType.tcomplex: 6>: <TComplex object>, <ObjectType.tdecimal: 28>: <TDecimal object>, <ObjectType.tdatetime: 29>: <TDatetime object>, <ObjectType.tslice: 27>: <TSlice object>, <ObjectType.tbytes: 8>: <TBytes object>, <ObjectType.tbytearray: 9>: <TBytearray object>, <ObjectType.tstr: 7>: <TStr object>, <ObjectType.tlist: 11>: <TList object>, <ObjectType.ttuple: 10>: <TTuple object>, <ObjectType.tmutableset: 12>: <TMutableSet object>, <ObjectType.tset: 13>: <TSet object>, <ObjectType.tmutablemapping: 14>: <TMutableMapping object>, <ObjectType.tmapping: 15>: <TMapping object>, <ObjectType.tfastset: 26>: <TFastSet object>, <ObjectType.tfastdict: 16>: <TFastDict object>, <ObjectType.tsmallint: 20>: <TSmallInt object>, <ObjectType.tbigint: 21>: <TBigInt object>, <ObjectType.tgeneralobject: 22>: <TGeneralObject object>, <ObjectType.tpickable: 18>: <TGeneralObject object>, <ObjectType.tstaticobject: 25>: <TStaticObject object>, <ObjectType.tstaticobjectwithslots: 30>: <TStaticObjectWithSlots object>, <ObjectType.tnumpyndarray: 23>: <TNumpyNdarray object>, <ObjectType.ttorchtensor: 24>: <TTorchTensor object>}
obj_type_map: Dict[Type, ObjectType] = {}
class MessageOffsets(IntEnum):
    """Payload slot indices of a queue message record.

    Each message stores the offsets of its neighbour messages plus the offset
    of the item it carries.
    """

    previous_message_offset = 0
    next_message_offset = 1
    item_offset = 2

An enumeration.

previous_message_offset = <MessageOffsets.previous_message_offset: 0>
next_message_offset = <MessageOffsets.next_message_offset: 1>
item_offset = <MessageOffsets.item_offset: 2>
Inherited Members
enum.Enum
name
value
builtins.int
conjugate
bit_length
to_bytes
from_bytes
as_integer_ratio
real
imag
numerator
denominator
class SharedMemory:
    # NOTE(review): the doc-extraction residue duplicated this class header;
    # a single header is kept.  Cross-process shared-memory arena: one creator
    # process allocates and initializes the block, one consumer attaches.

    def __init__(self, name: str, create: bool = False, size: Optional[int] = None, queue_type: QueueType = QueueType.fifo, zero_mem: bool = True, 
                 consumer_id: Optional[int] = None, creator_destroy_timeout: float = 5.0, unlink_old: bool = True):
        """Create (``create=True``) or prepare to attach to a named shared-memory block.

        The consumer side (``create=False``) only stores parameters here and
        must call ``init_consumer``/``ainit_consumer`` afterwards.

        Args:
            name: OS-level shared-memory block name.
            create: True for the creator side.
            size: total block size in bytes; None/0 means "system area only"
                for the creator and "discover from the header" for the consumer.
            queue_type: message-queue discipline, stored for later use.
            zero_mem: zero the data area on creation.
            consumer_id: optional consumer identifier (stored only).
            creator_destroy_timeout: seconds the creator waits for the consumer
                to detach in ``proper_close``.
            unlink_old: remove a stale block with the same name before creating.
        """
        global current_shared_memory_instance
        current_shared_memory_instance = self  # module-level "current instance" hook
        self._initiated: bool = False
        self._consumer_id: Optional[int] = consumer_id
        self._creator_destroy_timeout: float = creator_destroy_timeout
        self.offset_to_be_monitored: Offset = None
        self._malloc_time: float = 0.0   # cumulative time spent in malloc()
        self._realloc_time: float = 0.0  # cumulative time spent in realloc
        self._name: str = name
        self._create: bool = create
        self._queue_type: QueueType = queue_type
        self._zero_mem: bool = zero_mem
        self._last_message_offset: Offset = None
        # NOTE(review): annotated Coroutine, but this is a coroutine *function*.
        self._asleep_func: Coroutine = self._default_asleep_func

        # System area: 13 uint64 slots (104 bytes) at the start of the block.
        sys_arr_length = 13
        self.global_sys_array_len: int = sys_arr_length
        arr_byte_size = sys_arr_length * 8
        self.global_sys_area_size: int = arr_byte_size

        self._size: Optional[int] = size or None
        if (size is None) or (0 == size):
            size = self.global_sys_area_size
            if self._create:
                self._size = size
        
        if self._create:
            if unlink_old:
                SharedMemory.unlink_by_name(name)
            
            self._shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=name, create=create, size=size)
            self._init_post_mem()
            
            # System-area layout (byte offsets), as read back by the accessor
            # methods below:
            #   0: total size                  8: data start (== sys area size)
            #  16: data size (total - 104)    24: data end offset (== total size)
            #  32: free-memory search start   40: first message offset
            #  48: last message offset        56/64: creator/consumer in charge
            #  72/80: creator/consumer wants to be in charge
            #  88: creator ready              96: consumer ready
            write_uint64(self.base_address, self.sys_values_offset + 0, self._size)
            write_uint64(self.base_address, self.sys_values_offset + 8, sys_arr_length * 8)
            write_uint64(self.base_address, self.sys_values_offset + 16, self._size - arr_byte_size)
            write_uint64(self.base_address, self.sys_values_offset + 24, self._size)
            write_uint64(self.base_address, self.sys_values_offset + 32, sys_arr_length * 8)
            write_uint64(self.base_address, self.sys_values_offset + 40, 0)
            write_uint64(self.base_address, self.sys_values_offset + 48, 0)
            write_uint64(self.base_address, self.sys_values_offset + 56, 0)
            write_uint64(self.base_address, self.sys_values_offset + 64, 0)
            write_uint64(self.base_address, self.sys_values_offset + 72, 0)
            write_uint64(self.base_address, self.sys_values_offset + 80, 0)
            write_uint64(self.base_address, self.sys_values_offset + 88, 0)
            write_uint64(self.base_address, self.sys_values_offset + 96, 0)

            self.free_memory_search_start = self.read_free_memory_search_start()
            data_size: int = self.get_data_size()
            if self._zero_mem:
                zero_memory(self.base_address, self.free_memory_search_start, data_size)
            
            # First free-memory block header: type slot (0) + remaining size.
            write_uint64(self.base_address, self.free_memory_search_start + 0, 0)
            write_uint64(self.base_address, self.free_memory_search_start + 8, data_size - 16)

            self.set_creator_ready()

            self.get_data_end_offset()
            # NOTE(review): redundant re-check — we are already inside
            # `if self._create:` here.
            if self._create:
                self._initiated = True
            
        full_memory_barrier()
5671    async def _default_asleep_func(self):
5672        await asyncio.sleep(0)
5673    
5674    @property
5675    def size(self) -> int:
5676        return self._size
5677
5678    @property
5679    def name(self) -> str:
5680        return self._name
5681    
5682    @property
5683    def create(self) -> bool:
5684        return self._create
5685    
5686    def _init_post_mem(self):
5687        self.base_address = ctypes.addressof(ctypes.c_char.from_buffer(self._shared_memory.buf))
5688        self.sys_values_offset = 0
5689        # if create:
5690        #     print(f'Creator: {self.base_address=}')
5691        # else:
5692        #     print(f'Consumer: {self.base_address=}')
5693
5694        # self._shared_memory_bytearray = bytearray(self._shared_memory.buf)
5695
5696        # self.sys_arr = np.ndarray((self.global_sys_array_len,), dtype=np.uint64, buffer=self._shared_memory.buf)
5697        # if DEBUG:
5698        #     self.log_arr = np.ndarray((500,), dtype=np.uint64, buffer=self._shared_memory.buf)
5699        # else:
5700        #     self.log_arr = self.sys_arr
5701    
5702    def init_consumer(self, time_limit: Optional[RationalNumber] = None) -> bool:
5703        if self._initiated:
5704            return
5705
5706        if not self.wait_shared_memory_ready(time_limit):
5707            return False
5708        
5709        if (self._size is None) or (0 == self._size):
5710            size: int = self.global_sys_area_size
5711        else:
5712            size = self._size
5713
5714        self._shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=self._name, create=self._create, size=size)
5715        self._init_post_mem()
5716        self.wait_creator_ready()
5717        
5718        if self._size is None:
5719            self._size = read_uint64(self.base_address, self.sys_values_offset + 0)
5720            self._shared_memory.close()
5721            self._shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=self._name, create=self._create, size=self._size)
5722        
5723        self._init_post_mem()
5724        self.free_memory_search_start = self.read_free_memory_search_start()
5725        
5726        self.set_consumer_ready()
5727
5728        # print(bytes(self._shared_memory.buf[0:120]))
5729        self.get_data_end_offset()
5730        self._initiated = True
5731        full_memory_barrier()
5732    
5733    async def ainit_consumer(self, time_limit: Optional[RationalNumber] = None) -> bool:
5734        if self._initiated:
5735            return
5736
5737        if not await self.await_shared_memory_ready(time_limit):
5738            return False
5739        
5740        if (self._size is None) or (0 == self._size):
5741            size: int = self.global_sys_area_size
5742        else:
5743            size = self._size
5744
5745        self._shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=self._name, create=self._create, size=size)
5746        self._init_post_mem()
5747        await self.await_creator_ready(time_limit)
5748        
5749        if self._size is None:
5750            self._size = read_uint64(self.base_address, self.sys_values_offset + 0)
5751            self._shared_memory.close()
5752            self._shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=self._name, create=self._create, size=self._size)
5753        
5754        self._init_post_mem()
5755        self.free_memory_search_start = self.read_free_memory_search_start()
5756        
5757        self.set_consumer_ready()
5758
5759        # print(bytes(self._shared_memory.buf[0:120]))
5760        self.get_data_end_offset()
5761        self._initiated = True
5762        full_memory_barrier()
5763    
5764    def close_consumer(self):
5765        self.set_consumer_closed()
5766        full_memory_barrier()
5767    
5768    def __enter__(self):
5769        return self
5770    
5771    def __exit__(self, exc_type, exc_value, traceback):
5772        self.proper_close()
5773    
5774    async def __aenter__(self):
5775        return self
5776    
5777    async def __aexit__(self, exc_type, exc_value, traceback):
5778        await self.aproper_close()
5779    
5780    def close(self):
5781        self._shared_memory.close()
5782        if self._create:
5783            self._shared_memory.unlink()
5784            SharedMemory.unlink_by_name(self._name)
5785        else:
5786            if 'posix' == os.name:
5787                try:
5788                    from multiprocessing import resource_tracker
5789                    shm_name = f'/{self._name}'
5790                    resource_tracker.unregister(shm_name, "shared_memory")
5791                except FileNotFoundError:
5792                    pass
5793
5794    def proper_close(self):
5795        if self._create:
5796            self.wait_consumer_closed(self._creator_destroy_timeout)
5797        else:
5798            self.close_consumer()
5799        
5800        self.close()
5801
5802    async def aproper_close(self):
5803        if self._create:
5804            await self.await_consumer_closed(self._creator_destroy_timeout)
5805        else:
5806            self.close_consumer()
5807        
5808        self.close()
5809
5810    @staticmethod
5811    def unlink_by_name(shared_memory_name: str):
5812        """`multiprocessing.SharedMemory` requires this cleanup in order to handle the case 
5813            when the previous run of the program was terminated unexpectedly
5814
5815        Args:
5816            shared_memory_name (str): _description_
5817        """        
5818        if 'posix' == os.name:
5819            try:
5820                import _posixshmem
5821                from multiprocessing import resource_tracker
5822                shm_name = f'/{shared_memory_name}'
5823                _posixshmem.shm_unlink(shm_name)
5824                resource_tracker.unregister(shm_name, "shared_memory")
5825            except FileNotFoundError:
5826                pass
5827    
5828    @property
5829    def buf(self):
5830        """A memoryview of contents of the shared memory block.
5831
5832        Returns:
5833            _type_: _description_
5834        """        
5835        return self._shared_memory.buf
5836    
5837    def mem_view(self, offset: Offset, size: Size) -> memoryview:
5838        return self._shared_memory.buf[offset:offset + size]
5839    
5840    def read_mem(self, offset: Offset, size: Size) -> List[int]:
5841        result = list()
5842        for i in range(size):
5843            result.append(read_uint8(self.base_address, offset + i))
5844        
5845        return result
5846    
5847    def print_mem(self, offset: Offset, size: Size, text: str = None):
5848        result = list()
5849        for i in range(size):
5850            result.append(read_uint8(self.base_address, offset + i))
5851        
5852        if text:
5853            print(f'{text.format(offset)}: {result}')
5854        else:
5855            print(f'{result}')
5856    
5857    def set_creator_ready(self):
5858        write_uint64(self.base_address, self.sys_values_offset + 88, 1)
5859    
5860    def set_consumer_ready(self):
5861        write_uint64(self.base_address, self.sys_values_offset + 96, 1)
5862    
5863    def set_consumer_closed(self):
5864        write_uint64(self.base_address, self.sys_values_offset + 96, 0)
5865    
5866    def get_creator_ready(self):
5867        return read_uint64(self.base_address, self.sys_values_offset + 88)
5868    
5869    def get_consumer_ready(self):
5870        return read_uint64(self.base_address, self.sys_values_offset + 96)
5871
5872    def wait_shared_memory_ready(self, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001) -> bool:
5873        start_time = cpu_clock()
5874        shared_memory: MultiprocessingSharedMemory = None
5875        while True:
5876            try:
5877                shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=self._name, create=False)
5878            except FileNotFoundError as ex:
5879                if time_limit is not None:
5880                    if (cpu_clock() - start_time) > time_limit:
5881                        return False
5882                
5883                if periodic_sleep_time is None:
5884                    continue
5885                else:
5886                    sleep(periodic_sleep_time)
5887            finally:
5888                if shared_memory is not None:
5889                    shared_memory.close()
5890                    return True
5891        
5892        return False
5893
5894    async def await_shared_memory_ready(self, time_limit: Optional[RationalNumber] = None) -> bool:
5895        start_time = cpu_clock()
5896        shared_memory: MultiprocessingSharedMemory = None
5897        while True:
5898            try:
5899                shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=self._name, create=False)
5900            except FileNotFoundError as ex:
5901                if time_limit is not None:
5902                    if (cpu_clock() - start_time) > time_limit:
5903                        return False
5904                
5905                await self._asleep_func()
5906            finally:
5907                if shared_memory is not None:
5908                    shared_memory.close()
5909                    return True
5910        
5911        return False
5912    
5913    def wait_creator_ready(self, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001) -> bool:
5914        if self._create:
5915            return
5916        
5917        start_time = cpu_clock()
5918        full_memory_barrier()
5919        while not read_uint64(self.base_address, self.sys_values_offset + 88):
5920            if time_limit is not None:
5921                if (cpu_clock() - start_time) > time_limit:
5922                    return False
5923            
5924            if periodic_sleep_time is None:
5925                mm_pause()
5926            else:
5927                hps_sleep(periodic_sleep_time)
5928            
5929            full_memory_barrier()
5930    
5931    async def await_creator_ready(self, time_limit: Optional[RationalNumber] = None) -> bool:
5932        if self._create:
5933            return
5934        
5935        start_time = cpu_clock()
5936        full_memory_barrier()
5937        while not read_uint64(self.base_address, self.sys_values_offset + 88):
5938            if time_limit is not None:
5939                if (cpu_clock() - start_time) > time_limit:
5940                    return False
5941            
5942            await self._asleep_func()
5943            
5944            full_memory_barrier()
5945    
5946    def wait_consumer_ready(self, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001) -> bool:
5947        if not self._create:
5948            return
5949        
5950        start_time = cpu_clock()
5951        full_memory_barrier()
5952        while not read_uint64(self.base_address, self.sys_values_offset + 96):
5953            if time_limit is not None:
5954                if (cpu_clock() - start_time) > time_limit:
5955                    return False
5956            
5957            if periodic_sleep_time is None:
5958                mm_pause()
5959            else:
5960                hps_sleep(periodic_sleep_time)
5961            
5962            full_memory_barrier()
5963    
5964    async def await_consumer_ready(self, time_limit: Optional[RationalNumber] = None) -> bool:
5965        if not self._create:
5966            return
5967        
5968        start_time = cpu_clock()
5969        full_memory_barrier()
5970        while not read_uint64(self.base_address, self.sys_values_offset + 96):
5971            if time_limit is not None:
5972                if (cpu_clock() - start_time) > time_limit:
5973                    return False
5974            
5975            await self._asleep_func()
5976            
5977            full_memory_barrier()
5978    
5979    def wait_consumer_closed(self, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001) -> bool:
5980        if not self._create:
5981            return
5982        
5983        start_time = cpu_clock()
5984        full_memory_barrier()
5985        while read_uint64(self.base_address, self.sys_values_offset + 96):
5986            if time_limit is not None:
5987                if (cpu_clock() - start_time) > time_limit:
5988                    return False
5989            
5990            if periodic_sleep_time is None:
5991                mm_pause()
5992            else:
5993                hps_sleep(periodic_sleep_time)
5994            
5995            full_memory_barrier()
5996    
5997    async def await_consumer_closed(self, time_limit: Optional[RationalNumber] = None) -> bool:
5998        if not self._create:
5999            return
6000        
6001        start_time = cpu_clock()
6002        full_memory_barrier()
6003        while read_uint64(self.base_address, self.sys_values_offset + 96):
6004            if time_limit is not None:
6005                if (cpu_clock() - start_time) > time_limit:
6006                    return False
6007            
6008            await self._asleep_func()
6009            
6010            full_memory_barrier()
6011    
6012    def creator_in_charge(self) -> bool:
6013        return read_uint64(self.base_address, self.sys_values_offset + 56)
6014    
6015    def consumer_in_charge(self) -> bool:
6016        return read_uint64(self.base_address, self.sys_values_offset + 64)
6017    
6018    def creator_wants_to_be_in_charge(self) -> bool:
6019        return read_uint64(self.base_address, self.sys_values_offset + 72)
6020    
6021    def consumer_wants_to_be_in_charge(self) -> bool:
6022        return read_uint64(self.base_address, self.sys_values_offset + 80)
6023    
6024    def read_free_memory_search_start(self) -> int:
6025        # return self.get_data_start_offset()
6026        return read_uint64(self.base_address, self.sys_values_offset + 32)
6027    
6028    def update_free_memory_search_start(self) -> int:
6029        self.free_memory_search_start = self.read_free_memory_search_start()
6030    
6031    def get_free_memory_search_start(self) -> int:
6032        # self.update_free_memory_search_start()
6033        return self.free_memory_search_start
6034    
6035    def write_free_memory_search_start(self, offset: Offset) -> int:
6036        # return
6037        if ((self.get_data_end_offset() - 16) < offset) or (offset < self.get_data_start_offset()):
6038            offset = self.get_data_start_offset()
6039        
6040        write_uint64(self.base_address, self.sys_values_offset + 32, offset)
6041    
6042    def commit_free_memory_search_start(self):
6043        self.write_free_memory_search_start(self.free_memory_search_start)
6044    
6045    def set_free_memory_search_start(self, offset: Offset) -> int:
6046        # return
6047        if ((self.get_data_end_offset() - 16) < offset) or (offset < self.get_data_start_offset()):
6048            offset = self.get_data_start_offset()
6049        
6050        self.free_memory_search_start = offset
6051        # self.commit_free_memory_search_start()
6052    
6053    def get_last_message_offset(self) -> Optional[Offset]:
6054        return read_uint64(self.base_address, self.sys_values_offset + 48)
6055
6056    def set_last_message_offset(self, offset: Offset):
6057        write_uint64(self.base_address, self.sys_values_offset + 48, offset)
6058    
6059    def get_first_message_offset(self) -> Optional[Offset]:
6060        return read_uint64(self.base_address, self.sys_values_offset + 40)
6061
6062    def set_first_message_offset(self, offset: Offset):
6063        write_uint64(self.base_address, self.sys_values_offset + 40, offset)
6064    
6065    def get_data_start_offset(self) -> Offset:
6066        return read_uint64(self.base_address, self.sys_values_offset + 8)
6067
6068    def get_data_size(self) -> Size:
6069        return read_uint64(self.base_address, self.sys_values_offset + 16)
6070    
6071    def get_data_end_offset(self) -> Offset:
6072        result = read_uint64(self.base_address, self.sys_values_offset + 24)
6073        if result != len(self._shared_memory.buf):
6074            print(result, len(self._shared_memory.buf))
6075        
6076        return result
6077
6078    # def read_uint64(self, offset: Offset) -> int:
6079    #     return read_uint64(self.base_address, offset)
6080    
6081    # def write_uint64(self, offset: Offset, value: int):
6082    #     write_uint64(self.base_address, offset, value)
6083    
6084    def read_uint64(self, offset: Offset) -> int:
6085        return int.from_bytes(self._shared_memory.buf[offset:offset + 8], byteorder='little', signed=False)
6086    
6087    def write_uint64(self, offset: Offset, value: int):
6088        self._shared_memory.buf[offset:offset + 8] = value.to_bytes(8, byteorder='little', signed=False)
6089    
6090    # def read_uint32(self, offset: Offset) -> int:
6091    #     return int.from_bytes(self._shared_memory.buf[offset:offset + 4], byteorder='little', signed=False)
6092    
6093    # def write_uint32(self, offset: Offset, value: int):
6094    #     self._shared_memory.buf[offset:offset + 4] = value.to_bytes(4, byteorder='little', signed=False)
6095    
6096    # def read_uint16(self, offset: Offset) -> int:
6097    #     return int.from_bytes(self._shared_memory.buf[offset:offset + 2], byteorder='little', signed=False)
6098    
6099    # def write_uint16(self, offset: Offset, value: int):
6100    #     self._shared_memory.buf[offset:offset + 2] = value.to_bytes(2, byteorder='little', signed=False)
6101    
6102    # def read_uint8(self, offset: Offset) -> int:
6103    #     return int.from_bytes(self._shared_memory.buf[offset:offset + 1], byteorder='little', signed=False)
6104    
6105    # def write_uint8(self, offset: Offset, value: int):
6106    #     self._shared_memory.buf[offset:offset + 1] = value.to_bytes(1, byteorder='little', signed=False)
6107    
6108    # def read_int64(self, offset: Offset) -> int:
6109    #     return int.from_bytes(self._shared_memory.buf[offset:offset + 8], byteorder='little', signed=True)
6110    
6111    # def write_int64(self, offset: Offset, value: int):
6112    #     self._shared_memory.buf[offset:offset + 8] = value.to_bytes(8, byteorder='little', signed=True)
6113    
6114    # def read_int32(self, offset: Offset) -> int:
6115    #     return int.from_bytes(self._shared_memory.buf[offset:offset + 4], byteorder='little', signed=True)
6116    
6117    # def write_int32(self, offset: Offset, value: int):
6118    #     self._shared_memory.buf[offset:offset + 4] = value.to_bytes(4, byteorder='little', signed=True)
6119    
6120    # def read_int16(self, offset: Offset) -> int:
6121    #     return int.from_bytes(self._shared_memory.buf[offset:offset + 2], byteorder='little', signed=True)
6122    
6123    # def write_int16(self, offset: Offset, value: int):
6124    #     self._shared_memory.buf[offset:offset + 2] = value.to_bytes(2, byteorder='little', signed=True)
6125
6126    # def read_int8(self, offset: Offset) -> int:
6127    #     return int.from_bytes(self._shared_memory.buf[offset:offset + 1], byteorder='little', signed=True)
6128    
6129    # def write_int8(self, offset: Offset, value: int):
6130    #     self._shared_memory.buf[offset:offset + 1] = value.to_bytes(1, byteorder='little', signed=True)
6131
6132    # def read_float(self, offset: Offset) -> float:
6133    #     return float.from_bytes(self._shared_memory.buf[offset:offset + 4], byteorder='little', signed=False)
6134    
6135    # def write_float(self, offset: Offset, value: float):
6136    #     self._shared_memory.buf[offset:offset + 4] = value.to_bytes(4, byteorder='little', signed=False)
6137
6138    # def read_double(self, offset: Offset) -> float:
6139    #     return float.from_bytes(self._shared_memory.buf[offset:offset + 8], byteorder='little', signed=False)
6140    
6141    # def write_double(self, offset: Offset, value: float):
6142    #     self._shared_memory.buf[offset:offset + 8] = value.to_bytes(8, byteorder='little', signed=False)
6143    
6144    # def read_complex(self, offset: Offset) -> complex:
6145    #     return complex.from_bytes(self._shared_memory.buf[offset:offset + 16], byteorder='little', signed=False)
6146    
6147    # def write_complex(self, offset: Offset, value: complex):
6148    #     self._shared_memory.buf[offset:offset + 16] = value.to_bytes(16, byteorder='little', signed=False)
6149    
6150    # def read_bool(self, offset: Offset) -> bool:
6151    #     return bool.from_bytes(self._shared_memory.buf[offset:offset + 1], byteorder='little', signed=False)
6152    
6153    # def write_bool(self, offset: Offset, value: bool):
6154    #     self._shared_memory.buf[offset:offset + 1] = value.to_bytes(1, byteorder='little', signed=False)
6155    
6156    # def read_str(self, offset: Offset) -> str:
6157    #     size = read_uint64(self.base_address, offset)
6158    #     return self._shared_memory.buf[offset + 8:offset + 8 + size].decode()
6159    
6160    # def read_str_2(self, offset: Offset, size: Size) -> str:
6161    #     return self._shared_memory.buf[offset + 8:offset + 8 + size].decode()
6162    
6163    # def write_str(self, offset: Offset, value: str):
6164    #     size = len(value)
6165    #     write_uint64(self.base_address, offset, size)
6166    #     self._shared_memory.buf[offset + 8:offset + 8 + size] = value.encode()
6167    
6168    # def read_bytes(self, offset: Offset) -> bytes:
6169    #     size = read_uint64(self.base_address, offset)
6170    #     return self._shared_memory.buf[offset + 8:offset + 8 + size]
6171
6172    # def read_bytes_2(self, offset: Offset, size: Size) -> bytes:
6173    #     return self._shared_memory.buf[offset + 8:offset + 8 + size]
6174    
6175    # def write_bytes(self, offset: Offset, value: bytes):
6176    #     size = len(value)
6177    #     write_uint64(self.base_address, offset, size)
6178    #     self._shared_memory.buf[offset + 8:offset + 8 + size] = value
6179    
6180    # def read_bytearray(self, offset: Offset) -> bytearray:
6181    #     size = read_uint64(self.base_address, offset)
6182    #     return bytearray(self._shared_memory.buf[offset + 8:offset + 8 + size])
6183    
6184    # def read_bytearray_2(self, offset: Offset, size: Size) -> bytearray:
6185    #     return bytearray(self._shared_memory.buf[offset + 8:offset + 8 + size])
6186    
6187    # def write_bytearray(self, offset: Offset, value: bytearray):
6188    #     size = len(value)
6189    #     write_uint64(self.base_address, offset, size)
6190    #     self._shared_memory.buf[offset + 8:offset + 8 + size] = value
6191    
6192    # def read_tuple(self, offset: Offset) -> tuple:
6193    #     size = read_uint64(self.base_address, offset)
6194    #     return tuple(self._shared_memory.buf[offset + 8:offset + 8 + size])
6195    
6196    # def write_tuple(self, offset: Offset, value: tuple):
6197    #     size = len(value)
6198    #     write_uint64(self.base_address, offset, size)
6199    #     self._shared_memory.buf[offset + 8:offset + 8 + size] = value
6200    
6201    # def read_list(self, offset: Offset) -> list:
6202    #     size = read_uint64(self.base_address, offset)
6203    #     return list(self._shared_memory.buf[offset + 8:offset + 8 + size])
6204    
6205    # def write_list(self, offset: Offset, value: list):
6206    #     size = len(value)
6207    #     write_uint64(self.base_address, offset, size)
6208    #     self._shared_memory.buf[offset + 8:offset + 8 + size] = value
6209
6210    # def read_dict(self, offset: Offset) -> dict:
6211    #     size = read_uint64(self.base_address, offset)
6212    #     return dict(self._shared_memory.buf[offset + 8:offset + 8 + size])
6213    
6214    # def write_dict(self, offset: Offset, value: dict):
6215    #     size = len(value)
6216    #     write_uint64(self.base_address, offset, size)
6217    #     self._shared_memory.buf[offset + 8:offset + 8 + size] = value
6218    
6219    # def read_set(self, offset: Offset) -> set:
6220    #     size = read_uint64(self.base_address, offset)
6221    #     return set(self._shared_memory.buf[offset + 8:offset + 8 + size])
6222    
6223    # def write_set(self, offset: Offset, value: set):
6224    #     size = len(value)
6225    #     write_uint64(self.base_address, offset, size)
6226    #     self._shared_memory.buf[offset + 8:offset + 8 + size] = value
6227    
6228    # def read_pickable(self, offset: Offset) -> Any:
6229    #     size = read_uint64(self.base_address, offset)
6230    #     return pickle.loads(self._shared_memory.buf[offset + 8:offset + 8 + size])
6231    
6232    # def write_pickable(self, offset: Offset, value: Any):
6233    #     value_bytes = pickle.dumps(value)
6234    #     size = len(value_bytes)
6235    #     write_uint64(self.base_address, offset, size)
6236    #     self._shared_memory.buf[offset + 8:offset + 8 + size] = value_bytes
6237
6238    # ----------------------------
6239    
6240    def read_obj_type_and_size(self, offset: Offset) -> Tuple[ObjectType, Size]:
6241        obj_type = ObjectType(read_uint64(self.base_address, offset + 0))
6242        size = read_uint64(self.base_address, offset + 8)
6243        return obj_type, size
6244    
6245    def write_obj_type_and_size(self, offset: Offset, obj_type: ObjectType, size: Size):
6246        write_uint64(self.base_address, offset + 0, obj_type.value)
6247        write_uint64(self.base_address, offset + 8, size)
6248        return offset + 16
6249
6250    # ----------------------------
6251    
    # Return annotation fixed: every return statement yields a 5-tuple
    # (found, adjusted_size, last_free_block_offset, last_free_block_new_size,
    # next_block_offset), not the 3-tuple previously annotated.
    def test_free_memory_blocks(self, offset: Offset, desired_size: Size, data_end_offset: Offset) -> Tuple[bool, Size, Optional[Offset], Optional[Size], Offset]:
        """Walk consecutive free blocks from ``offset`` looking for ``desired_size`` bytes.

        Returns (found, adjusted_size, split_block_offset, split_block_size,
        next_block_offset).  When the run of free blocks is larger than
        needed, the tail is either merged into the allocation (if < 16 bytes,
        via ``adjusted_size``) or reported as a new free block to be written
        by the caller.
        """
        adjusted_size = desired_size
        initial_offset = offset
        sum_size = 0
        max_viable_offset = data_end_offset - 16  # last offset with room for a header
        last_found_obj_offset = None
        last_found_obj_size = None
        while True:
            last_found_obj_offset = offset
            try:
                obj_type = ObjectType(read_uint64(self.base_address, offset))
            except ValueError:
                # NOTE(review): on the very first iteration `obj_type` is
                # unbound after this handler, so the `obj_type is not ...`
                # test below would raise NameError; on later iterations a
                # stale value from the previous block is silently reused.
                print(f'Error: {offset=}, {desired_size=}, {sum_size=}')
            
            size = read_uint64(self.base_address, offset + 8)
            if size % 8:
                # Block sizes are always 8-byte aligned; anything else means
                # heap corruption — dump surrounding memory and abort.
                print(f'WRONG SIZE {obj_type=} {size=} {offset=} {desired_size=} {data_end_offset=}')
                self.print_mem(offset - 8 * 10, 8 * 10, 'WRONG SIZE - before')
                self.print_mem(offset, 8 * 10, 'WRONG SIZE - after')
                raise RuntimeError(f'WRONG SIZE: {size=}, {offset=}, {obj_type=}')
            
            last_found_obj_size = 16 + size  # header + payload
            next_block_offset = last_found_obj_offset + last_found_obj_size
            if next_block_offset > data_end_offset:
                # Block claims to extend past the arena — corrupt.
                print(f'{next_block_offset=}, {data_end_offset=}, {len(self._shared_memory.buf)=}')
                return False, adjusted_size, None, None, next_block_offset

            if obj_type is not ObjectType.tfree_memory:
                # Hit an allocated block: the free run ended too early.
                return False, adjusted_size, None, None, next_block_offset

            sum_size = next_block_offset - initial_offset

            if sum_size == desired_size:
                # Exact fit.
                return True, adjusted_size, None, None, next_block_offset

            if sum_size > desired_size:
                # Over-shot: split the last free block.
                new_next_block_offset = initial_offset + desired_size
                new_next_block_size = last_found_obj_size - (new_next_block_offset - last_found_obj_offset)
                if new_next_block_size < 16:
                    # Remainder too small for a header — absorb it.
                    adjusted_size = desired_size + new_next_block_size
                    return True, adjusted_size, None, None, next_block_offset
                else:
                    return True, adjusted_size, new_next_block_offset, new_next_block_size, new_next_block_offset

            offset = last_found_obj_offset + last_found_obj_size
            if offset > max_viable_offset:
                return False, adjusted_size, None, None, next_block_offset
    def combine_free_memory_blocks(self, free_mem_block_offset: Offset, size: Size, last_free_block_offset: Offset, last_free_block_new_size: Size, next_block_offset: Offset, mark_block: bool = False) -> None:
        """Finalize a run of free blocks found by ``test_free_memory_blocks``.

        Optionally re-tags the whole run at ``free_mem_block_offset`` as one
        free block of ``size`` bytes, and, when a trailing remainder exists
        (``last_free_block_offset`` is not None), writes a fresh free-block
        header for it.  Block headers are 16 bytes, hence the ``- 16``.

        NOTE(review): the original return annotation was
        ``Tuple[Size, Offset]`` but nothing is ever returned — corrected to
        ``None``.  ``next_block_offset`` is currently unused (see the
        commented-out search-start update below).
        """
        if mark_block:
            self.write_obj_type_and_size(free_mem_block_offset, ObjectType.tfree_memory, size - 16)
        
        if last_free_block_offset is not None:
            if last_free_block_new_size - 16 < 0:
                # A remainder smaller than one header should have been absorbed
                # by the caller; reaching here indicates an accounting bug.
                print(f'Error: {last_free_block_new_size=}')
            
            self.write_obj_type_and_size(last_free_block_offset, ObjectType.tfree_memory, last_free_block_new_size - 16)
        
        # self.set_free_memory_search_start(next_block_offset)
6311
6312    # ----------------------------
6313    
    def malloc(self, obj_type: ObjectType, size: Size, loop_allowed: bool = True, zero_mem: bool = False) -> Tuple[Optional[Offset], Size]:
        """Allocate ``size`` payload bytes tagged with ``obj_type``.

        Scans free blocks starting at the cached search-start offset; when
        ``loop_allowed``, wraps around once from the data start.  On success
        returns ``(block_offset, payload_size)`` — the payload may be slightly
        larger than requested (sizes are rounded up and sub-header remainders
        are absorbed into the allocation).

        Raises:
            FreeMemoryChunkNotFoundError: when no suitable free run exists.
                NOTE(review): despite the ``Optional[Offset]`` annotation this
                path raises rather than returning ``None``.
        """
        start_time = cpu_clock()  # accumulated into self._malloc_time in `finally`
        try:
            size += 16  # 16-byte header: uint64 type tag + uint64 payload size
            size = nearest_size(size)  # round up to allocation granularity
            adjusted_size = size
            initial_start_offset = self.get_free_memory_search_start()
            data_end_offset: Offset = self.get_data_end_offset()
            search_end_offset = data_end_offset - 16  # last offset a header can start at
            start_offset = initial_start_offset
            free_mem_block_offset: Optional[Offset] = None
            last_free_block_offset: Optional[Offset] = None
            last_free_block_new_size: Optional[Size] = None
            found: bool = False
            sum_size: Size = 0  # NOTE(review): never read after initialization
            # First pass: cached search start -> end of the data area.
            while (not found) and (start_offset <= search_end_offset):
                free_mem_block_offset = start_offset
                found, adjusted_size, last_free_block_offset, last_free_block_new_size, next_block_offset = self.test_free_memory_blocks(start_offset, size, data_end_offset)
                start_offset = next_block_offset
            
            # Second pass (wrap-around): data start -> where the first pass began.
            if (not found) and loop_allowed:
                start_offset = self.get_data_start_offset()
                search_end_offset = initial_start_offset - 16
                while (not found) and (start_offset <= search_end_offset):
                    free_mem_block_offset = start_offset
                    found, adjusted_size, last_free_block_offset, last_free_block_new_size, next_block_offset = self.test_free_memory_blocks(start_offset, size, data_end_offset)
                    start_offset = next_block_offset

            if not found:
                raise FreeMemoryChunkNotFoundError(obj_type, size, loop_allowed, zero_mem)
            
            # Re-tag the trailing remainder (if any) as a fresh free block.
            self.combine_free_memory_blocks(free_mem_block_offset, adjusted_size, last_free_block_offset, last_free_block_new_size, next_block_offset)
            obj_size = adjusted_size - 16
            self.write_obj_type_and_size(free_mem_block_offset, obj_type, obj_size)
            if zero_mem:
                # print(f'Zeroing memory 1: {free_mem_block_offset=}, {result_size=}')
                # hps_sleep(0.01)
                zero_memory(self.base_address, free_mem_block_offset + 16, obj_size)

            if free_mem_block_offset % 8:
                # Offsets must stay 8-byte aligned; misalignment means corruption upstream.
                print(f'Error: {free_mem_block_offset=}, {obj_size=}')
                
        
            self.set_free_memory_search_start(free_mem_block_offset)
            return free_mem_block_offset, obj_size
        finally:
            self._malloc_time += cpu_clock() - start_time
6361    
6362    # def zero_memory(self, offset: Offset, size: Size):
6363    #     # print(f'Zeroing memory 1: [{self.base_address + offset}:{self.base_address + offset + size}], {size=}')
6364    #     self._shared_memory_bytearray[offset:offset + size] = bytearray(size)
6365    
6366    def calloc(self, obj_type: ObjectType, size: Size, num: int, loop_allowed: bool = True, zero_mem: bool = True) -> Tuple[Optional[Offset], Size]:
6367        return self.malloc(obj_type, size * num, loop_allowed, zero_mem)
6368    
    def realloc(self, obj_offset: Offset, new_size: int, loop_allowed: bool = True, zero_mem: bool = True) -> Tuple[Optional[Offset], Size]:
        """Resize the object at ``obj_offset`` to ``new_size`` payload bytes.

        First tries to grow in place by absorbing the free block(s) directly
        following the object; failing that, allocates a new block, copies the
        old contents, and frees the original.  Returns
        ``(offset, payload_size)`` — the offset changes when the object was
        moved — or ``(None, 0)`` when the fallback malloc yielded no offset.
        """
        start_time: float = cpu_clock()
        internal_malloc_time: float = 0.0  # excluded below so malloc's own timer is not double-counted
        try:
            new_size += 16  # 16-byte header: uint64 type tag + uint64 payload size
            new_size = nearest_size(new_size)
            data_end_offset: Offset = self.get_data_end_offset()
            result_offset: Optional[Offset] = None
            result_obj_size: Size = 0
            original_obj_size = read_uint64(self.base_address, obj_offset + 8)  # current payload size
            size = original_obj_size + 16  # current full block size, header included
            next_obj_offset = obj_offset + size
            free_mem_block_offset = next_obj_offset
            dsize = new_size - size  # extra bytes needed right after the object
            found, additional_adjusted_size, last_free_block_offset, last_free_block_new_size, next_block_offset = self.test_free_memory_blocks(free_mem_block_offset, dsize, data_end_offset)
            if found:
                # In-place growth: swallow the adjacent free block(s).
                self.combine_free_memory_blocks(free_mem_block_offset, additional_adjusted_size, last_free_block_offset, last_free_block_new_size, next_block_offset)
                if zero_mem:
                    # NOTE(review): zeroing starts at the swallowed block's
                    # header (free_mem_block_offset), not its payload — confirm
                    # this is intended.
                    zero_memory(self.base_address, free_mem_block_offset, dsize)
                
                result_obj_size = new_size - 16
                write_uint64(self.base_address, obj_offset + 8, result_obj_size)  # bump payload size in place
                self.set_free_memory_search_start(obj_offset)
                result_offset = obj_offset
            else:
                # Relocate: allocate elsewhere, preserving the original type tag.
                # NOTE(review): ``new_size`` already includes the 16-byte header
                # here, and malloc() adds another 16 internally — confirm the
                # intentional over-allocation.
                internal_malloc_start_time: float = cpu_clock()
                new_offset, result_obj_size = self.malloc(ObjectType(read_uint64(self.base_address, obj_offset + 0)), new_size, loop_allowed)
                internal_malloc_time += cpu_clock() - internal_malloc_start_time
                if new_offset is None:
                    return None, 0

                # NOTE(review): the copy length ``size`` includes the 16-byte
                # header, so 16 bytes beyond the old payload are copied too —
                # confirm.
                self._shared_memory.buf[new_offset + 16:new_offset + 16 + size] = self._shared_memory.buf[obj_offset + 16:obj_offset + 16 + size]
                if zero_mem:
                    # Zero only the newly gained tail of the payload.
                    zero_memory(self.base_address, new_offset + 16 + original_obj_size, result_obj_size - original_obj_size)
                
                self.free(obj_offset)
                result_offset = new_offset
            
            return result_offset, result_obj_size
        finally:
            self._realloc_time += cpu_clock() - start_time - internal_malloc_time
6414    
    def free(self, offset: Offset) -> bool:
        """Release the block at ``offset`` by zeroing its type tag.

        Only the uint64 type field is overwritten (0 presumably corresponds
        to ``ObjectType.tfree_memory`` — confirm); the size field stays
        intact so the allocator can later reuse the block.  Always True.
        """
        write_uint64(self.base_address, offset, 0)
        return True
6418
6419    # ----------------------------
6420    
6421    def put_obj(self, obj: Any):
6422        obj_type = self._get_obj_type(obj)
6423        codec = codec_by_type[obj_type]
6424        mapped_obj, offset, size = codec.map_to_shared_memory(self, obj)
6425        return mapped_obj, offset, size
6426
6427    def get_obj(self, offset: int) -> Any:
6428        # print(f'get_obj: {offset=}')
6429        obj_type = ObjectType(read_uint64(self.base_address, offset))
6430        if obj_type is ObjectType.tfree_memory:
6431            # self.print_mem(offset - 32, 96, 'get_obj [offset - 32: offset + 64]. {}')
6432            raise RuntimeError
6433        
6434        codec = codec_by_type[obj_type]
6435        return codec.init_from_shared_memory(self, offset)
6436
6437    def get_obj_buffer(self, offset: int) -> memoryview:
6438        # print(f'get_obj: {offset=}')
6439        obj_type = ObjectType(read_uint64(self.base_address, offset))
6440        if obj_type is ObjectType.tfree_memory:
6441            # self.print_mem(offset - 32, 96, 'get_obj [offset - 32: offset + 64]. {}')
6442            raise RuntimeError
6443        
6444        codec = codec_by_type[obj_type]
6445        return codec.buffer(self, offset)
6446
6447    def get_obj_buffer_2(self, offset: int) -> Tuple[int, int]:
6448        # print(f'get_obj: {offset=}')
6449        obj_type = ObjectType(read_uint64(self.base_address, offset))
6450        if obj_type is ObjectType.tfree_memory:
6451            # self.print_mem(offset - 32, 96, 'get_obj [offset - 32: offset + 64]. {}')
6452            raise RuntimeError
6453        
6454        codec = codec_by_type[obj_type]
6455        return codec.buffer_2(self, offset)
6456
6457    def get_obj_mem_view(self, offset: int) -> memoryview:
6458        return self.mem_view(*self.get_obj_buffer_2(offset))
6459
6460    def destroy_obj(self, offset: int) -> Any:
6461        obj_type = ObjectType(read_uint64(self.base_address, offset))
6462        codec = codec_by_type[obj_type]
6463        return codec.destroy(self, offset)
6464
6465    # ----------------------------
6466
6467    def map_object(self, obj: Any) -> Any:
6468        # self.update_free_memory_search_start()
6469        mapped_obj, offset, size = self.put_obj(obj)
6470        # self.commit_free_memory_search_start()
6471        return mapped_obj
6472
    def get_object(self, offset: Offset) -> Any:
        """Alias for :meth:`get_obj`: decode the object stored at ``offset``."""
        return self.get_obj(offset)
6475
    def destroy_object(self, offset: Offset) -> Any:
        """Alias for :meth:`destroy_obj`: destroy the object at ``offset``."""
        return self.destroy_obj(offset)
6478
6479    # ----------------------------
6480
6481    def write_message(self, obj: Any) -> Tuple[Any, Offset, Offset]:
6482        # self.update_free_memory_search_start()
6483        message_offset, message_real_size = self.malloc(ObjectType.tmessage, 24)
6484        try:
6485            mapped_obj, offset, size = self.put_obj(obj)
6486            # self.commit_free_memory_search_start()
6487            last_message_offset: Offset = self.get_last_message_offset()
6488            if last_message_offset:
6489                write_uint64(self.base_address, last_message_offset + 16 + 8, message_offset)
6490            else:
6491                self.set_first_message_offset(message_offset)
6492            
6493            write_uint64(self.base_address, message_offset + 16 + 0, last_message_offset)
6494            write_uint64(self.base_address, message_offset + 16 + 8, 0)
6495            write_uint64(self.base_address, message_offset + 16 + 16, offset)
6496            self.set_last_message_offset(message_offset)
6497        except:
6498            self.free(message_offset)
6499            raise
6500
6501        return mapped_obj, offset, message_offset
6502
6503    def put_message(self, obj: Any) -> Any:
6504        mapped_obj, offset, message_offset = self.write_message(obj)
6505        return mapped_obj
6506    
6507    def put_message_2(self, obj: Any) -> Tuple[Any, Offset]:
6508        mapped_obj, offset, message_offset = self.write_message(obj)
6509        return mapped_obj, offset
6510
6511    def has_messages(self) -> bool:
6512        return self.get_last_message_offset() != 0
6513
    def read_message_info(self, queue_type: QueueType = QueueType.fifo) -> Tuple[Any, Optional[Offset], Optional[Offset]]:
        """Unlink one message envelope and decode its payload.

        FIFO pops from the head of the doubly-linked queue, anything else
        (LIFO) from the tail.  Returns ``(obj, obj_offset, message_offset)``;
        all three are ``None`` when the queue is empty, and the first two are
        ``None`` when the envelope carries no payload.  The envelope itself
        is NOT freed here — see :meth:`destroy_message`.
        """
        if QueueType.fifo == queue_type:
            message_offset = self.get_first_message_offset()
            if not message_offset:
                return None, None, None
            
            # The head's `next` field (+16 header, +8) becomes the new head.
            next_message_offset = read_uint64(self.base_address, message_offset + 16 + 8)
            self.set_first_message_offset(next_message_offset)
            if next_message_offset:
                # Clear the new head's `prev` pointer.
                write_uint64(self.base_address, next_message_offset + 16 + 0, 0)
            else:
                # Queue is now empty.
                self.set_last_message_offset(0)
        else:
            message_offset = self.get_last_message_offset()
            if not message_offset:
                return None, None, None
            
            # The tail's `prev` field (+16 header, +0) becomes the new tail.
            prev_message_offset = read_uint64(self.base_address, message_offset + 16 + 0)
            self.set_last_message_offset(prev_message_offset)
            if prev_message_offset:
                # Clear the new tail's `next` pointer.
                write_uint64(self.base_address, prev_message_offset + 16 + 8, 0)
            else:
                # Queue is now empty.
                self.set_first_message_offset(0)
        
        # Third envelope field (+16) holds the payload object's offset.
        obj_offset = read_uint64(self.base_address, message_offset + 16 + 16)
        if not obj_offset:
            return None, None, message_offset

        obj = self.get_obj(obj_offset)
        return obj, obj_offset, message_offset
6551
6552    def destroy_message(self, message_offset: Offset):
6553        if not message_offset:
6554            return
6555        
6556        # obj_offset = read_uint64(self.base_address, message_offset + 16 + 16)
6557        # if obj_offset:
6558        #     self.destroy_obj(obj_offset)
6559        
6560        # self.destroy_obj(message_offset)
6561
6562        self.free(message_offset)
6563    
6564    def read_message(self, queue_type: QueueType = QueueType.fifo) -> Any:
6565        obj, obj_offset, message_offset = self.read_message_info(queue_type)
6566        if message_offset:
6567            return obj
6568        else:
6569            raise NoMessagesInQueueError
6570    
6571    def read_message_2(self, queue_type: QueueType = QueueType.fifo) -> Tuple[Any, Offset]:
6572        obj, obj_offset, message_offset = self.read_message_info(queue_type)
6573        if message_offset:
6574            return obj, obj_offset
6575        else:
6576            raise NoMessagesInQueueError
6577
6578    def take_message(self, queue_type: QueueType = QueueType.fifo) -> Any:
6579        obj, obj_offset, message_offset = self.read_message_info(queue_type)
6580        if message_offset:
6581            self.destroy_message(message_offset)
6582        else:
6583            raise NoMessagesInQueueError
6584        
6585        return obj
6586
6587    def take_message_2(self, queue_type: QueueType = QueueType.fifo) -> Tuple[Any, Offset]:
6588        obj, obj_offset, message_offset = self.read_message_info(queue_type)
6589        if message_offset:
6590            self.destroy_message(message_offset)
6591        else:
6592            raise NoMessagesInQueueError
6593        
6594        return obj, obj_offset
6595    
6596    def get_message(self, default = None, queue_type: QueueType = QueueType.fifo) -> Any:
6597        obj, obj_offset, message_offset = self.read_message_info(queue_type)
6598        if message_offset:
6599            return obj
6600        else:
6601            return default
6602    
6603    def get_message_2(self, default = None, queue_type: QueueType = QueueType.fifo) -> Tuple[Any, Optional[Offset]]:
6604        obj, obj_offset, message_offset = self.read_message_info(queue_type)
6605        if message_offset:
6606            return obj, obj_offset
6607        else:
6608            return default, None
6609
6610    def pop_message(self, default = None, queue_type: QueueType = QueueType.fifo) -> Any:
6611        obj, obj_offset, message_offset = self.read_message_info(queue_type)
6612        if message_offset:
6613            self.destroy_message(message_offset)
6614        else:
6615            obj = default
6616        
6617        return obj
6618
6619    def pop_message_2(self, default = None, queue_type: QueueType = QueueType.fifo) -> Tuple[Any, Optional[Offset]]:
6620        obj, obj_offset, message_offset = self.read_message_info(queue_type)
6621        if message_offset:
6622            self.destroy_message(message_offset)
6623        else:
6624            obj = default
6625            obj_offset = None
6626        
6627        return obj, obj_offset
6628
6629    # ----------------------------
6630
    def get_in_line(self) -> bool:
        """Try to acquire the cross-process turn (non-blocking).

        Creator and consumer each own an "interested" flag word and a "turn"
        word inside the sys-values area (creator: +56/+72, consumer:
        +64/+80).  The handshake resembles a Peterson-style mutual-exclusion
        protocol (NOTE(review): verify against the peer's
        ``consumer_in_charge``/``creator_in_charge`` implementations); the
        full memory barriers order the flag writes relative to the peer's
        reads.  Returns True when this side now holds the turn.
        """
        if self._create:
            # Stand down first: clear own flag, set the turn word.
            write_uint64(self.base_address, self.sys_values_offset + 56, 0)
            write_uint64(self.base_address, self.sys_values_offset + 72, 1)
            full_memory_barrier()
            if self.consumer_in_charge():
                return False
            else:
                # Announce interest, release the turn word, then re-check the peer.
                write_uint64(self.base_address, self.sys_values_offset + 56, 1)
                full_memory_barrier()
                write_uint64(self.base_address, self.sys_values_offset + 72, 0)
                full_memory_barrier()
                self.update_free_memory_search_start()
                if self.consumer_in_charge():
                    # Lost the race: retract interest and back off.
                    write_uint64(self.base_address, self.sys_values_offset + 56, 0)
                    full_memory_barrier()
                    write_uint64(self.base_address, self.sys_values_offset + 72, 1)
                    full_memory_barrier()
                    return False

                return True
        else:
            # Mirror image for the consumer side (flag +64, turn +80).
            write_uint64(self.base_address, self.sys_values_offset + 64, 0)
            write_uint64(self.base_address, self.sys_values_offset + 80, 1)
            full_memory_barrier()
            if self.creator_in_charge():
                return False
            else:
                write_uint64(self.base_address, self.sys_values_offset + 64, 1)
                full_memory_barrier()
                write_uint64(self.base_address, self.sys_values_offset + 80, 0)
                full_memory_barrier()
                self.update_free_memory_search_start()
                if self.creator_in_charge():
                    write_uint64(self.base_address, self.sys_values_offset + 64, 0)
                    full_memory_barrier()
                    write_uint64(self.base_address, self.sys_values_offset + 80, 1)
                    full_memory_barrier()
                    return False
                
                return True
6672            
6673    def release(self):
6674        self.commit_free_memory_search_start()
6675        if self._create:
6676            write_uint64(self.base_address, self.sys_values_offset + 56, 0)
6677            write_uint64(self.base_address, self.sys_values_offset + 72, 0)
6678            full_memory_barrier()
6679        else:
6680            write_uint64(self.base_address, self.sys_values_offset + 64, 0)
6681            write_uint64(self.base_address, self.sys_values_offset + 80, 0)
6682            full_memory_barrier()
6683
6684    def wait_my_turn(self, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001) -> bool:
6685        start_time = cpu_clock()
6686        while not self.get_in_line():
6687            if time_limit is not None:
6688                if (cpu_clock() - start_time) > time_limit:
6689                    return False
6690            
6691            if periodic_sleep_time is None:
6692                mm_pause()
6693            else:
6694                hps_sleep(periodic_sleep_time)
6695        
6696        return True
6697
6698    async def await_my_turn(self, time_limit: Optional[RationalNumber] = None) -> bool:
6699        start_time = cpu_clock()
6700        while not self.get_in_line():
6701            if time_limit is not None:
6702                if (cpu_clock() - start_time) > time_limit:
6703                    return False
6704            
6705            await self._asleep_func()
6706        
6707        return True
6708
6709    # ----------------------------
6710
6711    def wait_for_messages(self, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001) -> bool:
6712        start_time = cpu_clock()
6713        has_messages = False
6714        while not has_messages:
6715            if time_limit is not None:
6716                if (cpu_clock() - start_time) > time_limit:
6717                    return False
6718            
6719            if periodic_sleep_time is None:
6720                mm_pause()
6721            else:
6722                hps_sleep(periodic_sleep_time)
6723
6724            with wait_my_turn(self):
6725                has_messages = self.has_messages()
6726        
6727        return True
6728
    async def await_for_messages(self, time_limit: Optional[RationalNumber] = None) -> bool:
        """Async-poll until the message queue is non-empty; False on timeout.

        Sleeps via ``self._asleep_func`` between polls, then peeks at the
        queue while holding the turn.  NOTE(review): a synchronous ``with``
        is used on the module-level ``await_my_turn`` (the sync sibling uses
        ``wait_my_turn``); confirm it really is a plain context manager —
        otherwise this should be ``async with``.
        """
        start_time = cpu_clock()
        has_messages = False
        while not has_messages:
            if time_limit is not None:
                if (cpu_clock() - start_time) > time_limit:
                    return False
            
            await self._asleep_func()

            with await_my_turn(self, time_limit):
                has_messages = self.has_messages()
        
        return True
6743
6744    # ----------------------------
6745
6746    @staticmethod
6747    def _get_obj_type(obj: Any) -> ObjectType:
6748        obj_type = type(obj)
6749        if obj is None:
6750            obj_type_atom: ObjectType = ObjectType.tnone
6751        elif obj_type is bool:
6752            obj_type_atom = ObjectType.tbool
6753        elif obj_type is int:
6754            obj_type_atom = ObjectType.tint
6755        elif obj_type is float:
6756            obj_type_atom = ObjectType.tfloat
6757        elif obj_type is complex:
6758            obj_type_atom = ObjectType.tcomplex
6759        elif obj_type is Decimal:
6760            obj_type_atom = ObjectType.tdecimal
6761        elif obj_type is slice:
6762            obj_type_atom = ObjectType.tslice
6763        elif obj_type is str:
6764            obj_type_atom = ObjectType.tstr
6765        elif obj_type is bytes:
6766            obj_type_atom = ObjectType.tbytes
6767        elif obj_type is bytearray:
6768            obj_type_atom = ObjectType.tbytearray
6769        elif obj_type is tuple:
6770            obj_type_atom = ObjectType.ttuple
6771        elif obj_type is list:
6772            obj_type_atom = ObjectType.tlist
6773        elif obj_type in {datetime, timedelta, timezone, date, time}:
6774            obj_type_atom = ObjectType.tdatetime
6775        elif issubclass(obj_type, FastLimitedSet):
6776            obj_type_atom = ObjectType.tfastset
6777        elif issubclass(obj_type, AbsMutableSet):
6778            obj_type_atom = ObjectType.tmutableset
6779        elif issubclass(obj_type, AbsSet):
6780            obj_type_atom = ObjectType.tset
6781        elif issubclass(obj_type, FastLimitedDict):
6782            obj_type_atom = ObjectType.tfastdict
6783        elif issubclass(obj_type, ForceMapping):
6784            obj_type_atom = ObjectType.tmapping
6785        elif issubclass(obj_type, AbsMutableMapping):
6786            obj_type_atom = ObjectType.tmutablemapping
6787        elif issubclass(obj_type, AbsMapping):
6788            obj_type_atom = ObjectType.tmapping
6789        elif obj_type is SmallInt:
6790            obj_type_atom = ObjectType.tsmallint
6791        elif obj_type is BigInt:
6792            obj_type_atom = ObjectType.tbigint
6793        elif issubclass(obj_type, Tensor):
6794            obj_type_atom = ObjectType.ttorchtensor
6795        elif issubclass(obj_type, np.ndarray):
6796            obj_type_atom = ObjectType.tnumpyndarray
6797        elif issubclass(obj_type, (ForceGeneralObjectCopy, ForceGeneralObjectInplace)):
6798            obj_type_atom = ObjectType.tgeneralobject
6799        elif issubclass(obj_type, (ForceStaticObjectCopy, ForceStaticObjectInplace)):
6800            obj_type_atom = ObjectType.tstaticobject
6801        elif obj_type in obj_type_map:
6802            obj_type_atom = obj_type_map[obj_type]
6803        # elif hasattr(obj, '__dict__'):
6804        #     obj_type_atom = ObjectType.tgeneralobject
6805        # else:
6806        #     obj_type_atom = ObjectType.tpickable
6807        elif hasattr(obj, '__slots__') or ((not hasattr(obj, '__slots__')) and (not hasattr(obj, '__dict__'))):
6808            obj_type_atom = ObjectType.tstaticobjectwithslots
6809        else:
6810            # obj_type_atom = ObjectType.tgeneralobject
6811            obj_type_atom = ObjectType.tstaticobject
6812        
6813        return obj_type_atom
SharedMemory( name: str, create: bool = False, size: typing.Union[int, NoneType] = None, queue_type: QueueType = <QueueType.fifo: 0>, zero_mem: bool = True, consumer_id: typing.Union[int, NoneType] = None, creator_destroy_timeout: float = 5.0, unlink_old: bool = True)
    def __init__(self, name: str, create: bool = False, size: Optional[int] = None, queue_type: QueueType = QueueType.fifo, zero_mem: bool = True, 
                 consumer_id: Optional[int] = None, creator_destroy_timeout: float = 5.0, unlink_old: bool = True):
        """Create (creator side) or prepare (consumer side) a shared-memory arena.

        With ``create=True`` the underlying block is allocated, the 13-slot
        uint64 sys-values header is written, and one free block spanning the
        whole data area is laid down.  With ``create=False`` only bookkeeping
        attributes are set; the consumer attaches later via
        ``init_consumer``/``ainit_consumer``.
        """
        global current_shared_memory_instance
        current_shared_memory_instance = self  # module-level "most recent instance" hook
        self._initiated: bool = False
        self._consumer_id: Optional[int] = consumer_id
        self._creator_destroy_timeout: float = creator_destroy_timeout
        self.offset_to_be_monitored: Optional[Offset] = None
        self._malloc_time: float = 0.0
        self._realloc_time: float = 0.0
        self._name: str = name
        self._create: bool = create
        self._queue_type: QueueType = queue_type
        self._zero_mem: bool = zero_mem
        self._last_message_offset: Optional[Offset] = None
        self._asleep_func: Coroutine = self._default_asleep_func

        sys_arr_length = 13  # number of uint64 sys slots (byte offsets 0..96)
        self.global_sys_array_len: int = sys_arr_length
        arr_byte_size = sys_arr_length * 8
        self.global_sys_area_size: int = arr_byte_size

        self._size: Optional[int] = size or None
        if (size is None) or (0 == size):
            # No usable size given: fall back to the bare sys area.
            size = self.global_sys_area_size
            if self._create:
                self._size = size
        
        if self._create:
            if unlink_old:
                # Drop any stale segment left over from a crashed run.
                SharedMemory.unlink_by_name(name)
            
            self._shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=name, create=create, size=size)
            self._init_post_mem()
            
            # Sys-values header.  Slot meanings are inferred from usage in
            # this class — TODO confirm: +0 total segment size (read back by
            # the consumer in init_consumer); +8/+32 sys-area byte size;
            # +16 presumably the data-area size; +56/+72 creator flag/turn
            # words and +64/+80 consumer flag/turn words (see get_in_line).
            write_uint64(self.base_address, self.sys_values_offset + 0, self._size)
            write_uint64(self.base_address, self.sys_values_offset + 8, sys_arr_length * 8)
            write_uint64(self.base_address, self.sys_values_offset + 16, self._size - arr_byte_size)
            write_uint64(self.base_address, self.sys_values_offset + 24, self._size)
            write_uint64(self.base_address, self.sys_values_offset + 32, sys_arr_length * 8)
            write_uint64(self.base_address, self.sys_values_offset + 40, 0)
            write_uint64(self.base_address, self.sys_values_offset + 48, 0)
            write_uint64(self.base_address, self.sys_values_offset + 56, 0)
            write_uint64(self.base_address, self.sys_values_offset + 64, 0)
            write_uint64(self.base_address, self.sys_values_offset + 72, 0)
            write_uint64(self.base_address, self.sys_values_offset + 80, 0)
            write_uint64(self.base_address, self.sys_values_offset + 88, 0)
            write_uint64(self.base_address, self.sys_values_offset + 96, 0)
            # print(bytes(self._shared_memory.buf[0:120]))

            self.free_memory_search_start = self.read_free_memory_search_start()
            data_size: int = self.get_data_size()
            if self._zero_mem:
                zero_memory(self.base_address, self.free_memory_search_start, data_size)
            
            # Lay down a single free block covering the whole data area
            # (type tag 0, payload size excluding the 16-byte header).
            write_uint64(self.base_address, self.free_memory_search_start + 0, 0)
            write_uint64(self.base_address, self.free_memory_search_start + 8, data_size - 16)

            self.set_creator_ready()

            # print(bytes(self._shared_memory.buf[0:120]))
            self.get_data_end_offset()
            if self._create:  # NOTE(review): always True inside this branch — redundant re-check
                self._initiated = True
            
        full_memory_barrier()
offset_to_be_monitored: int
global_sys_array_len: int
global_sys_area_size: int
size: int
    @property
    def size(self) -> int:
        """Total size of the shared-memory segment in bytes."""
        return self._size
name: str
    @property
    def name(self) -> str:
        """Name of the underlying shared-memory segment."""
        return self._name
create: bool
    @property
    def create(self) -> bool:
        """True when this instance is the creator side of the segment."""
        return self._create
def init_consumer(self, time_limit: typing.Union[int, float, NoneType] = None) -> bool:
5702    def init_consumer(self, time_limit: Optional[RationalNumber] = None) -> bool:
5703        if self._initiated:
5704            return
5705
5706        if not self.wait_shared_memory_ready(time_limit):
5707            return False
5708        
5709        if (self._size is None) or (0 == self._size):
5710            size: int = self.global_sys_area_size
5711        else:
5712            size = self._size
5713
5714        self._shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=self._name, create=self._create, size=size)
5715        self._init_post_mem()
5716        self.wait_creator_ready()
5717        
5718        if self._size is None:
5719            self._size = read_uint64(self.base_address, self.sys_values_offset + 0)
5720            self._shared_memory.close()
5721            self._shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=self._name, create=self._create, size=self._size)
5722        
5723        self._init_post_mem()
5724        self.free_memory_search_start = self.read_free_memory_search_start()
5725        
5726        self.set_consumer_ready()
5727
5728        # print(bytes(self._shared_memory.buf[0:120]))
5729        self.get_data_end_offset()
5730        self._initiated = True
5731        full_memory_barrier()
async def ainit_consumer(self, time_limit: typing.Union[int, float, NoneType] = None) -> bool:
5733    async def ainit_consumer(self, time_limit: Optional[RationalNumber] = None) -> bool:
5734        if self._initiated:
5735            return
5736
5737        if not await self.await_shared_memory_ready(time_limit):
5738            return False
5739        
5740        if (self._size is None) or (0 == self._size):
5741            size: int = self.global_sys_area_size
5742        else:
5743            size = self._size
5744
5745        self._shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=self._name, create=self._create, size=size)
5746        self._init_post_mem()
5747        await self.await_creator_ready(time_limit)
5748        
5749        if self._size is None:
5750            self._size = read_uint64(self.base_address, self.sys_values_offset + 0)
5751            self._shared_memory.close()
5752            self._shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=self._name, create=self._create, size=self._size)
5753        
5754        self._init_post_mem()
5755        self.free_memory_search_start = self.read_free_memory_search_start()
5756        
5757        self.set_consumer_ready()
5758
5759        # print(bytes(self._shared_memory.buf[0:120]))
5760        self.get_data_end_offset()
5761        self._initiated = True
5762        full_memory_barrier()
def close_consumer(self):
    def close_consumer(self):
        """Signal the creator that the consumer side is done with the segment."""
        self.set_consumer_closed()
        full_memory_barrier()  # make the closed flag visible to the creator
def close(self):
5780    def close(self):
5781        self._shared_memory.close()
5782        if self._create:
5783            self._shared_memory.unlink()
5784            SharedMemory.unlink_by_name(self._name)
5785        else:
5786            if 'posix' == os.name:
5787                try:
5788                    from multiprocessing import resource_tracker
5789                    shm_name = f'/{self._name}'
5790                    resource_tracker.unregister(shm_name, "shared_memory")
5791                except FileNotFoundError:
5792                    pass
def proper_close(self):
5794    def proper_close(self):
5795        if self._create:
5796            self.wait_consumer_closed(self._creator_destroy_timeout)
5797        else:
5798            self.close_consumer()
5799        
5800        self.close()
async def aproper_close(self):
5802    async def aproper_close(self):
5803        if self._create:
5804            await self.await_consumer_closed(self._creator_destroy_timeout)
5805        else:
5806            self.close_consumer()
5807        
5808        self.close()
    @property
    def buf(self):
        """A memoryview over the raw contents of the attached shared memory block.

        Returns:
            memoryview: the full buffer of the underlying
            multiprocessing shared memory segment.
        """
        return self._shared_memory.buf
def mem_view(self, offset: int, size: int) -> memoryview:
5837    def mem_view(self, offset: Offset, size: Size) -> memoryview:
5838        return self._shared_memory.buf[offset:offset + size]
def read_mem(self, offset: int, size: int) -> List[int]:
5840    def read_mem(self, offset: Offset, size: Size) -> List[int]:
5841        result = list()
5842        for i in range(size):
5843            result.append(read_uint8(self.base_address, offset + i))
5844        
5845        return result
def print_mem(self, offset: int, size: int, text: str = None):
5847    def print_mem(self, offset: Offset, size: Size, text: str = None):
5848        result = list()
5849        for i in range(size):
5850            result.append(read_uint8(self.base_address, offset + i))
5851        
5852        if text:
5853            print(f'{text.format(offset)}: {result}')
5854        else:
5855            print(f'{result}')
    def set_creator_ready(self):
        """Raise the creator-ready flag (sys qword +88) for consumers to observe."""
        write_uint64(self.base_address, self.sys_values_offset + 88, 1)
    def set_consumer_ready(self):
        """Raise the consumer-ready flag (sys qword +96) for the creator to observe."""
        write_uint64(self.base_address, self.sys_values_offset + 96, 1)
    def set_consumer_closed(self):
        """Clear the consumer-ready flag (sys qword +96), announcing consumer shutdown."""
        write_uint64(self.base_address, self.sys_values_offset + 96, 0)
    def get_creator_ready(self):
        """Read the creator-ready flag (sys qword +88); non-zero means ready."""
        return read_uint64(self.base_address, self.sys_values_offset + 88)
    def get_consumer_ready(self):
        """Read the consumer-ready flag (sys qword +96); non-zero means ready."""
        return read_uint64(self.base_address, self.sys_values_offset + 96)
def wait_shared_memory_ready( self, time_limit: typing.Union[int, float, NoneType] = None, periodic_sleep_time: typing.Union[int, float, NoneType] = 1e-09) -> bool:
5872    def wait_shared_memory_ready(self, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001) -> bool:
5873        start_time = cpu_clock()
5874        shared_memory: MultiprocessingSharedMemory = None
5875        while True:
5876            try:
5877                shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=self._name, create=False)
5878            except FileNotFoundError as ex:
5879                if time_limit is not None:
5880                    if (cpu_clock() - start_time) > time_limit:
5881                        return False
5882                
5883                if periodic_sleep_time is None:
5884                    continue
5885                else:
5886                    sleep(periodic_sleep_time)
5887            finally:
5888                if shared_memory is not None:
5889                    shared_memory.close()
5890                    return True
5891        
5892        return False
async def await_shared_memory_ready(self, time_limit: typing.Union[int, float, NoneType] = None) -> bool:
5894    async def await_shared_memory_ready(self, time_limit: Optional[RationalNumber] = None) -> bool:
5895        start_time = cpu_clock()
5896        shared_memory: MultiprocessingSharedMemory = None
5897        while True:
5898            try:
5899                shared_memory: MultiprocessingSharedMemory = MultiprocessingSharedMemory(name=self._name, create=False)
5900            except FileNotFoundError as ex:
5901                if time_limit is not None:
5902                    if (cpu_clock() - start_time) > time_limit:
5903                        return False
5904                
5905                await self._asleep_func()
5906            finally:
5907                if shared_memory is not None:
5908                    shared_memory.close()
5909                    return True
5910        
5911        return False
def wait_creator_ready( self, time_limit: typing.Union[int, float, NoneType] = None, periodic_sleep_time: typing.Union[int, float, NoneType] = 1e-09) -> bool:
5913    def wait_creator_ready(self, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001) -> bool:
5914        if self._create:
5915            return
5916        
5917        start_time = cpu_clock()
5918        full_memory_barrier()
5919        while not read_uint64(self.base_address, self.sys_values_offset + 88):
5920            if time_limit is not None:
5921                if (cpu_clock() - start_time) > time_limit:
5922                    return False
5923            
5924            if periodic_sleep_time is None:
5925                mm_pause()
5926            else:
5927                hps_sleep(periodic_sleep_time)
5928            
5929            full_memory_barrier()
async def await_creator_ready(self, time_limit: typing.Union[int, float, NoneType] = None) -> bool:
5931    async def await_creator_ready(self, time_limit: Optional[RationalNumber] = None) -> bool:
5932        if self._create:
5933            return
5934        
5935        start_time = cpu_clock()
5936        full_memory_barrier()
5937        while not read_uint64(self.base_address, self.sys_values_offset + 88):
5938            if time_limit is not None:
5939                if (cpu_clock() - start_time) > time_limit:
5940                    return False
5941            
5942            await self._asleep_func()
5943            
5944            full_memory_barrier()
def wait_consumer_ready( self, time_limit: typing.Union[int, float, NoneType] = None, periodic_sleep_time: typing.Union[int, float, NoneType] = 1e-09) -> bool:
5946    def wait_consumer_ready(self, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001) -> bool:
5947        if not self._create:
5948            return
5949        
5950        start_time = cpu_clock()
5951        full_memory_barrier()
5952        while not read_uint64(self.base_address, self.sys_values_offset + 96):
5953            if time_limit is not None:
5954                if (cpu_clock() - start_time) > time_limit:
5955                    return False
5956            
5957            if periodic_sleep_time is None:
5958                mm_pause()
5959            else:
5960                hps_sleep(periodic_sleep_time)
5961            
5962            full_memory_barrier()
async def await_consumer_ready(self, time_limit: typing.Union[int, float, NoneType] = None) -> bool:
5964    async def await_consumer_ready(self, time_limit: Optional[RationalNumber] = None) -> bool:
5965        if not self._create:
5966            return
5967        
5968        start_time = cpu_clock()
5969        full_memory_barrier()
5970        while not read_uint64(self.base_address, self.sys_values_offset + 96):
5971            if time_limit is not None:
5972                if (cpu_clock() - start_time) > time_limit:
5973                    return False
5974            
5975            await self._asleep_func()
5976            
5977            full_memory_barrier()
def wait_consumer_closed( self, time_limit: typing.Union[int, float, NoneType] = None, periodic_sleep_time: typing.Union[int, float, NoneType] = 1e-09) -> bool:
5979    def wait_consumer_closed(self, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001) -> bool:
5980        if not self._create:
5981            return
5982        
5983        start_time = cpu_clock()
5984        full_memory_barrier()
5985        while read_uint64(self.base_address, self.sys_values_offset + 96):
5986            if time_limit is not None:
5987                if (cpu_clock() - start_time) > time_limit:
5988                    return False
5989            
5990            if periodic_sleep_time is None:
5991                mm_pause()
5992            else:
5993                hps_sleep(periodic_sleep_time)
5994            
5995            full_memory_barrier()
async def await_consumer_closed(self, time_limit: typing.Union[int, float, NoneType] = None) -> bool:
5997    async def await_consumer_closed(self, time_limit: Optional[RationalNumber] = None) -> bool:
5998        if not self._create:
5999            return
6000        
6001        start_time = cpu_clock()
6002        full_memory_barrier()
6003        while read_uint64(self.base_address, self.sys_values_offset + 96):
6004            if time_limit is not None:
6005                if (cpu_clock() - start_time) > time_limit:
6006                    return False
6007            
6008            await self._asleep_func()
6009            
6010            full_memory_barrier()
def creator_in_charge(self) -> bool:
6012    def creator_in_charge(self) -> bool:
6013        return read_uint64(self.base_address, self.sys_values_offset + 56)
def consumer_in_charge(self) -> bool:
6015    def consumer_in_charge(self) -> bool:
6016        return read_uint64(self.base_address, self.sys_values_offset + 64)
def creator_wants_to_be_in_charge(self) -> bool:
6018    def creator_wants_to_be_in_charge(self) -> bool:
6019        return read_uint64(self.base_address, self.sys_values_offset + 72)
def consumer_wants_to_be_in_charge(self) -> bool:
6021    def consumer_wants_to_be_in_charge(self) -> bool:
6022        return read_uint64(self.base_address, self.sys_values_offset + 80)
    def read_free_memory_search_start(self) -> int:
        """Read the allocator's persisted search-start hint from sys qword +32."""
        # return self.get_data_start_offset()
        return read_uint64(self.base_address, self.sys_values_offset + 32)
    def update_free_memory_search_start(self) -> None:
        """Refresh the locally cached search-start hint from shared memory (sys qword +32)."""
        self.free_memory_search_start = self.read_free_memory_search_start()
    def get_free_memory_search_start(self) -> int:
        """Return the locally cached search-start hint (the shared value is NOT re-read here)."""
        # self.update_free_memory_search_start()
        return self.free_memory_search_start
    def write_free_memory_search_start(self, offset: Offset) -> None:
        """Persist the allocator's search-start hint into sys qword +32.

        Offsets outside ``[data_start, data_end - 16]`` are clamped back to the
        start of the data area (16 bytes is the minimum block-header size).
        """
        if ((self.get_data_end_offset() - 16) < offset) or (offset < self.get_data_start_offset()):
            offset = self.get_data_start_offset()
        
        write_uint64(self.base_address, self.sys_values_offset + 32, offset)
    def commit_free_memory_search_start(self):
        """Flush the locally cached search-start hint to the shared sys area."""
        self.write_free_memory_search_start(self.free_memory_search_start)
    def set_free_memory_search_start(self, offset: Offset) -> None:
        """Set the locally cached search-start hint (NOT persisted to shared memory).

        Offsets outside ``[data_start, data_end - 16]`` are clamped back to the
        start of the data area.
        """
        if ((self.get_data_end_offset() - 16) < offset) or (offset < self.get_data_start_offset()):
            offset = self.get_data_start_offset()
        
        self.free_memory_search_start = offset
        # self.commit_free_memory_search_start()
    def get_last_message_offset(self) -> Optional[Offset]:
        """Offset of the queue's tail message node (sys qword +48); 0 means empty queue."""
        return read_uint64(self.base_address, self.sys_values_offset + 48)
    def set_last_message_offset(self, offset: Offset):
        """Store the queue's tail message-node offset into sys qword +48."""
        write_uint64(self.base_address, self.sys_values_offset + 48, offset)
    def get_first_message_offset(self) -> Optional[Offset]:
        """Offset of the queue's head message node (sys qword +40); 0 means empty queue."""
        return read_uint64(self.base_address, self.sys_values_offset + 40)
    def set_first_message_offset(self, offset: Offset):
        """Store the queue's head message-node offset into sys qword +40."""
        write_uint64(self.base_address, self.sys_values_offset + 40, offset)
    def get_data_start_offset(self) -> Offset:
        """Start offset of the data (allocatable) area, read from sys qword +8."""
        return read_uint64(self.base_address, self.sys_values_offset + 8)
    def get_data_size(self) -> Size:
        """Size of the data (allocatable) area in bytes, read from sys qword +16."""
        return read_uint64(self.base_address, self.sys_values_offset + 16)
    def get_data_end_offset(self) -> Offset:
        """End offset of the data area, read from sys qword +24.

        Cross-checks the stored value against the actual buffer length and
        prints both when they disagree (debug diagnostic left in place).
        """
        result = read_uint64(self.base_address, self.sys_values_offset + 24)
        if result != len(self._shared_memory.buf):
            # NOTE(review): diagnostic print — consider proper logging
            print(result, len(self._shared_memory.buf))
        
        return result
def read_uint64(self, offset: int) -> int:
6084    def read_uint64(self, offset: Offset) -> int:
6085        return int.from_bytes(self._shared_memory.buf[offset:offset + 8], byteorder='little', signed=False)
def write_uint64(self, offset: int, value: int):
6087    def write_uint64(self, offset: Offset, value: int):
6088        self._shared_memory.buf[offset:offset + 8] = value.to_bytes(8, byteorder='little', signed=False)
def read_obj_type_and_size( self, offset: int) -> Tuple[ObjectType, int]:
6240    def read_obj_type_and_size(self, offset: Offset) -> Tuple[ObjectType, Size]:
6241        obj_type = ObjectType(read_uint64(self.base_address, offset + 0))
6242        size = read_uint64(self.base_address, offset + 8)
6243        return obj_type, size
def write_obj_type_and_size( self, offset: int, obj_type: ObjectType, size: int):
6245    def write_obj_type_and_size(self, offset: Offset, obj_type: ObjectType, size: Size):
6246        write_uint64(self.base_address, offset + 0, obj_type.value)
6247        write_uint64(self.base_address, offset + 8, size)
6248        return offset + 16
    def test_free_memory_blocks(self, offset: Offset, desired_size: Size, data_end_offset: Offset) -> Tuple[bool, Size, Optional[Offset], Optional[Size], Offset]:
        """Check whether a run of contiguous free blocks at ``offset`` can hold ``desired_size`` bytes.

        Walks consecutive 16-byte block headers starting at ``offset`` and
        accumulates free blocks until the run covers ``desired_size``, a live
        object is hit, or the walk runs off the data area.

        Returns:
            Tuple of:
                found: True when the run covers ``desired_size``.
                adjusted_size: possibly grown past ``desired_size`` so that no
                    sub-16-byte (un-headerable) fragment is left behind.
                new_next_block_offset: offset of the trailing free remainder
                    that must be re-headered by the caller, or None.
                new_next_block_size: total size of that remainder, or None.
                next_block_offset: where the caller's scan should continue.
        """
        adjusted_size = desired_size
        initial_offset = offset
        sum_size = 0
        max_viable_offset = data_end_offset - 16  # last offset that can hold a header
        last_found_obj_offset = None
        last_found_obj_size = None
        while True:
            last_found_obj_offset = offset
            try:
                obj_type = ObjectType(read_uint64(self.base_address, offset))
            except ValueError:
                # NOTE(review): on a corrupt type tag this only logs; obj_type
                # then keeps its previous iteration's value (or is unbound on
                # the very first pass) — confirm this is intended.
                print(f'Error: {offset=}, {desired_size=}, {sum_size=}')
            
            size = read_uint64(self.base_address, offset + 8)
            if size % 8:
                # Block sizes must stay 8-byte aligned; dump nearby memory for diagnosis
                print(f'WRONG SIZE {obj_type=} {size=} {offset=} {desired_size=} {data_end_offset=}')
                self.print_mem(offset - 8 * 10, 8 * 10, 'WRONG SIZE - before')
                self.print_mem(offset, 8 * 10, 'WRONG SIZE - after')
                raise RuntimeError(f'WRONG SIZE: {size=}, {offset=}, {obj_type=}')
            
            last_found_obj_size = 16 + size  # header + payload
            next_block_offset = last_found_obj_offset + last_found_obj_size
            if next_block_offset > data_end_offset:
                # Header claims the block extends past the data area
                print(f'{next_block_offset=}, {data_end_offset=}, {len(self._shared_memory.buf)=}')
                return False, adjusted_size, None, None, next_block_offset

            if obj_type is not ObjectType.tfree_memory:
                # Hit a live object: the free run cannot grow any further
                return False, adjusted_size, None, None, next_block_offset

            sum_size = next_block_offset - initial_offset

            if sum_size == desired_size:
                return True, adjusted_size, None, None, next_block_offset

            if sum_size > desired_size:
                # Run overshoots: split it — unless the remainder is smaller
                # than a 16-byte header, in which case absorb it into the
                # allocation (adjusted_size grows).
                new_next_block_offset = initial_offset + desired_size
                new_next_block_size = last_found_obj_size - (new_next_block_offset - last_found_obj_offset)
                if new_next_block_size < 16:
                    adjusted_size = desired_size + new_next_block_size
                    return True, adjusted_size, None, None, next_block_offset
                else:
                    return True, adjusted_size, new_next_block_offset, new_next_block_size, new_next_block_offset

            offset = last_found_obj_offset + last_found_obj_size
            if offset > max_viable_offset:
                return False, adjusted_size, None, None, next_block_offset
    def combine_free_memory_blocks(self, free_mem_block_offset: Offset, size: Size, last_free_block_offset: Offset, last_free_block_new_size: Size, next_block_offset: Offset, mark_block: bool = False) -> None:
        """Finalize headers after test_free_memory_blocks() located a usable run.

        Optionally re-headers the claimed run as a single free block, and
        shrinks the trailing free block that the run cut into (when
        ``last_free_block_offset`` is not None). ``next_block_offset`` is
        currently unused (the disabled search-start update below).
        """
        if mark_block:
            self.write_obj_type_and_size(free_mem_block_offset, ObjectType.tfree_memory, size - 16)
        
        if last_free_block_offset is not None:
            if last_free_block_new_size - 16 < 0:
                # Remainder smaller than a header should never reach this point
                print(f'Error: {last_free_block_new_size=}')
            
            self.write_obj_type_and_size(last_free_block_offset, ObjectType.tfree_memory, last_free_block_new_size - 16)
        
        # self.set_free_memory_search_start(next_block_offset)
    def malloc(self, obj_type: ObjectType, size: Size, loop_allowed: bool = True, zero_mem: bool = False) -> Tuple[Optional[Offset], Size]:
        """First-fit allocate ``size`` payload bytes (plus a 16-byte header) of type ``obj_type``.

        Scans free blocks forward from the cached search-start hint; when
        ``loop_allowed`` the scan wraps around to the start of the data area.

        Args:
            obj_type: header type tag for the new block.
            size: requested payload size; rounded up by nearest_size().
            loop_allowed: retry from the data start when the tail scan fails.
            zero_mem: zero the payload after allocation.

        Returns:
            Tuple[Optional[Offset], Size]: (block offset, usable payload size).

        Raises:
            FreeMemoryChunkNotFoundError: when no suitable run of free blocks exists.
        """
        start_time = cpu_clock()
        try:
            size += 16  # account for the type/size header
            size = nearest_size(size)
            adjusted_size = size
            initial_start_offset = self.get_free_memory_search_start()
            data_end_offset: Offset = self.get_data_end_offset()
            search_end_offset = data_end_offset - 16  # last offset that can hold a header
            start_offset = initial_start_offset
            free_mem_block_offset: Offset = None
            last_free_block_offset: Offset = None
            last_free_block_new_size: Size = None
            found: bool = False
            sum_size: Size = 0
            # Forward scan: from the hint to the end of the data area
            while (not found) and (start_offset <= search_end_offset):
                free_mem_block_offset = start_offset
                found, adjusted_size, last_free_block_offset, last_free_block_new_size, next_block_offset = self.test_free_memory_blocks(start_offset, size, data_end_offset)
                start_offset = next_block_offset
            
            # Wrap-around scan: from the data start up to the original hint
            if (not found) and loop_allowed:
                start_offset = self.get_data_start_offset()
                search_end_offset = initial_start_offset - 16
                while (not found) and (start_offset <= search_end_offset):
                    free_mem_block_offset = start_offset
                    found, adjusted_size, last_free_block_offset, last_free_block_new_size, next_block_offset = self.test_free_memory_blocks(start_offset, size, data_end_offset)
                    start_offset = next_block_offset

            if not found:
                raise FreeMemoryChunkNotFoundError(obj_type, size, loop_allowed, zero_mem)
            
            # Claim the run: fix up the split remainder (if any), then header the new block
            self.combine_free_memory_blocks(free_mem_block_offset, adjusted_size, last_free_block_offset, last_free_block_new_size, next_block_offset)
            obj_size = adjusted_size - 16
            self.write_obj_type_and_size(free_mem_block_offset, obj_type, obj_size)
            if zero_mem:
                zero_memory(self.base_address, free_mem_block_offset + 16, obj_size)

            if free_mem_block_offset % 8:
                # Allocations are expected to stay 8-byte aligned
                print(f'Error: {free_mem_block_offset=}, {obj_size=}')
                
        
            self.set_free_memory_search_start(free_mem_block_offset)
            return free_mem_block_offset, obj_size
        finally:
            # Accumulate time spent allocating for profiling
            self._malloc_time += cpu_clock() - start_time
    def calloc(self, obj_type: ObjectType, size: Size, num: int, loop_allowed: bool = True, zero_mem: bool = True) -> Tuple[Optional[Offset], Size]:
        """Allocate a (by default zero-initialized) block for ``num`` items of ``size`` bytes each."""
        return self.malloc(obj_type, size * num, loop_allowed, zero_mem)
    def realloc(self, obj_offset: Offset, new_size: int, loop_allowed: bool = True, zero_mem: bool = True) -> Tuple[Optional[Offset], Size]:
        """Grow the object at ``obj_offset`` to ``new_size`` payload bytes.

        Tries to extend in place by consuming the free blocks immediately
        following the object; otherwise allocates a new block, copies the data
        over, and frees the original.

        Returns:
            Tuple[Optional[Offset], Size]: (resulting offset, payload size), or
            (None, 0) when the fallback allocation fails.
        """
        start_time: float = cpu_clock()
        internal_malloc_time: float = 0.0  # subtracted so malloc time is not double-counted
        try:
            new_size += 16  # account for the header
            new_size = nearest_size(new_size)
            data_end_offset: Offset = self.get_data_end_offset()
            result_offset: Offset = None
            result_obj_size: Size = 0
            original_obj_size = read_uint64(self.base_address, obj_offset + 8)
            size = original_obj_size + 16  # current total block size (header + payload)
            next_obj_offset = obj_offset + size
            free_mem_block_offset = next_obj_offset
            dsize = new_size - size  # extra bytes needed beyond the current block
            found, additional_adjusted_size, last_free_block_offset, last_free_block_new_size, next_block_offset = self.test_free_memory_blocks(free_mem_block_offset, dsize, data_end_offset)
            if found:
                # In-place growth: absorb the following free run
                self.combine_free_memory_blocks(free_mem_block_offset, additional_adjusted_size, last_free_block_offset, last_free_block_new_size, next_block_offset)
                if zero_mem:
                    zero_memory(self.base_address, free_mem_block_offset, dsize)
                
                result_obj_size = new_size - 16
                write_uint64(self.base_address, obj_offset + 8, result_obj_size)
                self.set_free_memory_search_start(obj_offset)
                result_offset = obj_offset
            else:
                # Relocate: allocate elsewhere (same type tag), copy, zero the
                # grown tail, then free the original block
                internal_malloc_start_time: float = cpu_clock()
                new_offset, result_obj_size = self.malloc(ObjectType(read_uint64(self.base_address, obj_offset + 0)), new_size, loop_allowed)
                internal_malloc_time += cpu_clock() - internal_malloc_start_time
                if new_offset is None:
                    return None, 0

                # NOTE(review): this copies `size` (= payload + 16) bytes of
                # payload region, i.e. 16 bytes past the original payload —
                # looks like an off-by-16; confirm it is intentional.
                self._shared_memory.buf[new_offset + 16:new_offset + 16 + size] = self._shared_memory.buf[obj_offset + 16:obj_offset + 16 + size]
                if zero_mem:
                    zero_memory(self.base_address, new_offset + 16 + original_obj_size, result_obj_size - original_obj_size)
                
                self.free(obj_offset)
                result_offset = new_offset
            
            return result_offset, result_obj_size
        finally:
            self._realloc_time += cpu_clock() - start_time - internal_malloc_time
    def free(self, offset: Offset) -> bool:
        """Mark the block at ``offset`` as free.

        Writes 0 into the header's type qword — presumably the value of
        ObjectType.tfree_memory (confirm); the size qword is left intact so the
        allocator can traverse and coalesce the block later. Always returns True.
        """
        write_uint64(self.base_address, offset, 0)
        return True
    def put_obj(self, obj: Any):
        """Serialize ``obj`` into shared memory via its type's codec.

        Returns:
            Tuple[Any, Offset, Size]: (mapped object, block offset, block size).
        """
        obj_type = self._get_obj_type(obj)
        codec = codec_by_type[obj_type]
        mapped_obj, offset, size = codec.map_to_shared_memory(self, obj)
        return mapped_obj, offset, size
def get_obj(self, offset: int) -> Any:
6427    def get_obj(self, offset: int) -> Any:
6428        # print(f'get_obj: {offset=}')
6429        obj_type = ObjectType(read_uint64(self.base_address, offset))
6430        if obj_type is ObjectType.tfree_memory:
6431            # self.print_mem(offset - 32, 96, 'get_obj [offset - 32: offset + 64]. {}')
6432            raise RuntimeError
6433        
6434        codec = codec_by_type[obj_type]
6435        return codec.init_from_shared_memory(self, offset)
def get_obj_buffer(self, offset: int) -> memoryview:
6437    def get_obj_buffer(self, offset: int) -> memoryview:
6438        # print(f'get_obj: {offset=}')
6439        obj_type = ObjectType(read_uint64(self.base_address, offset))
6440        if obj_type is ObjectType.tfree_memory:
6441            # self.print_mem(offset - 32, 96, 'get_obj [offset - 32: offset + 64]. {}')
6442            raise RuntimeError
6443        
6444        codec = codec_by_type[obj_type]
6445        return codec.buffer(self, offset)
def get_obj_buffer_2(self, offset: int) -> Tuple[int, int]:
6447    def get_obj_buffer_2(self, offset: int) -> Tuple[int, int]:
6448        # print(f'get_obj: {offset=}')
6449        obj_type = ObjectType(read_uint64(self.base_address, offset))
6450        if obj_type is ObjectType.tfree_memory:
6451            # self.print_mem(offset - 32, 96, 'get_obj [offset - 32: offset + 64]. {}')
6452            raise RuntimeError
6453        
6454        codec = codec_by_type[obj_type]
6455        return codec.buffer_2(self, offset)
    def get_obj_mem_view(self, offset: int) -> memoryview:
        """Zero-copy memoryview over the buffer of the object stored at ``offset``."""
        return self.mem_view(*self.get_obj_buffer_2(offset))
    def destroy_obj(self, offset: int) -> Any:
        """Dispatch to the codec's destroy() for the object stored at ``offset``."""
        obj_type = ObjectType(read_uint64(self.base_address, offset))
        codec = codec_by_type[obj_type]
        return codec.destroy(self, offset)
def map_object(self, obj: typing.Any) -> Any:
6467    def map_object(self, obj: Any) -> Any:
6468        # self.update_free_memory_search_start()
6469        mapped_obj, offset, size = self.put_obj(obj)
6470        # self.commit_free_memory_search_start()
6471        return mapped_obj
    def get_object(self, offset: Offset) -> Any:
        """Public alias for get_obj(): decode the object stored at ``offset``."""
        return self.get_obj(offset)
    def destroy_object(self, offset: Offset) -> Any:
        """Public alias for destroy_obj(): release the object stored at ``offset``."""
        return self.destroy_obj(offset)
    def write_message(self, obj: Any) -> Tuple[Any, Offset, Offset]:
        """Map ``obj`` into shared memory and append it to the doubly linked message queue.

        A message node carries a 24-byte payload after its 16-byte header:
        prev-node offset (+0), next-node offset (+8), and the mapped object's
        offset (+16), each a uint64.

        Returns:
            Tuple[Any, Offset, Offset]: (mapped object, object offset, message-node offset).
        """
        # self.update_free_memory_search_start()
        message_offset, message_real_size = self.malloc(ObjectType.tmessage, 24)
        try:
            mapped_obj, offset, size = self.put_obj(obj)
            # self.commit_free_memory_search_start()
            last_message_offset: Offset = self.get_last_message_offset()
            if last_message_offset:
                # Point the current tail's `next` link at the new node
                write_uint64(self.base_address, last_message_offset + 16 + 8, message_offset)
            else:
                # Queue was empty: the new node is also the head
                self.set_first_message_offset(message_offset)
            
            write_uint64(self.base_address, message_offset + 16 + 0, last_message_offset)  # prev
            write_uint64(self.base_address, message_offset + 16 + 8, 0)  # next: none (new tail)
            write_uint64(self.base_address, message_offset + 16 + 16, offset)  # payload offset
            self.set_last_message_offset(message_offset)
        except:
            # Roll back the node allocation if mapping or linking failed, then re-raise
            self.free(message_offset)
            raise

        return mapped_obj, offset, message_offset
def put_message(self, obj: typing.Any) -> Any:
6503    def put_message(self, obj: Any) -> Any:
6504        mapped_obj, offset, message_offset = self.write_message(obj)
6505        return mapped_obj
def put_message_2(self, obj: typing.Any) -> Tuple[Any, int]:
6507    def put_message_2(self, obj: Any) -> Tuple[Any, Offset]:
6508        mapped_obj, offset, message_offset = self.write_message(obj)
6509        return mapped_obj, offset
    def has_messages(self) -> bool:
        """True when the message queue is non-empty (tail-node offset is non-zero)."""
        return self.get_last_message_offset() != 0
    def read_message_info(self, queue_type: QueueType = QueueType.fifo) -> Tuple[Any, Optional[Offset], Optional[Offset]]:
        """Unlink the next message node from the queue and decode its payload.

        Args:
            queue_type: fifo pops from the head of the linked list; any other
                value pops from the tail (LIFO).

        Returns:
            Tuple[Any, Optional[Offset], Optional[Offset]]: (object, object
            offset, message-node offset). (None, None, None) when the queue is
            empty; (None, None, message_offset) when the node has no payload.
        """
        if QueueType.fifo == queue_type:
            message_offset = self.get_first_message_offset()
            if not message_offset:
                return None, None, None
            
            # Advance the head to the next node and clear that node's `prev` link
            next_message_offset = read_uint64(self.base_address, message_offset + 16 + 8)
            self.set_first_message_offset(next_message_offset)
            if next_message_offset:
                write_uint64(self.base_address, next_message_offset + 16 + 0, 0)
            else:
                self.set_last_message_offset(0)  # queue became empty
        else:
            message_offset = self.get_last_message_offset()
            if not message_offset:
                return None, None, None
            
            # Retreat the tail to the previous node and clear that node's `next` link
            prev_message_offset = read_uint64(self.base_address, message_offset + 16 + 0)
            self.set_last_message_offset(prev_message_offset)
            if prev_message_offset:
                write_uint64(self.base_address, prev_message_offset + 16 + 8, 0)
            else:
                self.set_first_message_offset(0)  # queue became empty
        
        obj_offset = read_uint64(self.base_address, message_offset + 16 + 16)
        if not obj_offset:
            return None, None, message_offset

        obj = self.get_obj(obj_offset)
        return obj, obj_offset, message_offset
    def destroy_message(self, message_offset: Offset):
        """Release the message-node block at ``message_offset``; no-op for a zero offset.

        Only the node itself is freed. NOTE(review): the payload object the
        node points to is NOT destroyed here (see the retained disabled code
        below) — presumably callers may still hold the mapped object; confirm.
        """
        if not message_offset:
            return
        
        # obj_offset = read_uint64(self.base_address, message_offset + 16 + 16)
        # if obj_offset:
        #     self.destroy_obj(obj_offset)
        
        # self.destroy_obj(message_offset)

        self.free(message_offset)
def read_message( self, queue_type: QueueType = <QueueType.fifo: 0>) -> Any:
6564    def read_message(self, queue_type: QueueType = QueueType.fifo) -> Any:
6565        obj, obj_offset, message_offset = self.read_message_info(queue_type)
6566        if message_offset:
6567            return obj
6568        else:
6569            raise NoMessagesInQueueError
def read_message_2( self, queue_type: QueueType = <QueueType.fifo: 0>) -> Tuple[Any, int]:
6571    def read_message_2(self, queue_type: QueueType = QueueType.fifo) -> Tuple[Any, Offset]:
6572        obj, obj_offset, message_offset = self.read_message_info(queue_type)
6573        if message_offset:
6574            return obj, obj_offset
6575        else:
6576            raise NoMessagesInQueueError
def take_message( self, queue_type: QueueType = <QueueType.fifo: 0>) -> Any:
6578    def take_message(self, queue_type: QueueType = QueueType.fifo) -> Any:
6579        obj, obj_offset, message_offset = self.read_message_info(queue_type)
6580        if message_offset:
6581            self.destroy_message(message_offset)
6582        else:
6583            raise NoMessagesInQueueError
6584        
6585        return obj
def take_message_2( self, queue_type: QueueType = <QueueType.fifo: 0>) -> Tuple[Any, int]:
6587    def take_message_2(self, queue_type: QueueType = QueueType.fifo) -> Tuple[Any, Offset]:
6588        obj, obj_offset, message_offset = self.read_message_info(queue_type)
6589        if message_offset:
6590            self.destroy_message(message_offset)
6591        else:
6592            raise NoMessagesInQueueError
6593        
6594        return obj, obj_offset
def get_message( self, default=None, queue_type: QueueType = <QueueType.fifo: 0>) -> Any:
6596    def get_message(self, default = None, queue_type: QueueType = QueueType.fifo) -> Any:
6597        obj, obj_offset, message_offset = self.read_message_info(queue_type)
6598        if message_offset:
6599            return obj
6600        else:
6601            return default
def get_message_2( self, default=None, queue_type: QueueType = <QueueType.fifo: 0>) -> Tuple[Any, Union[int, NoneType]]:
6603    def get_message_2(self, default = None, queue_type: QueueType = QueueType.fifo) -> Tuple[Any, Optional[Offset]]:
6604        obj, obj_offset, message_offset = self.read_message_info(queue_type)
6605        if message_offset:
6606            return obj, obj_offset
6607        else:
6608            return default, None
def pop_message( self, default=None, queue_type: QueueType = <QueueType.fifo: 0>) -> Any:
6610    def pop_message(self, default = None, queue_type: QueueType = QueueType.fifo) -> Any:
6611        obj, obj_offset, message_offset = self.read_message_info(queue_type)
6612        if message_offset:
6613            self.destroy_message(message_offset)
6614        else:
6615            obj = default
6616        
6617        return obj
def pop_message_2( self, default=None, queue_type: QueueType = <QueueType.fifo: 0>) -> Tuple[Any, Union[int, NoneType]]:
6619    def pop_message_2(self, default = None, queue_type: QueueType = QueueType.fifo) -> Tuple[Any, Optional[Offset]]:
6620        obj, obj_offset, message_offset = self.read_message_info(queue_type)
6621        if message_offset:
6622            self.destroy_message(message_offset)
6623        else:
6624            obj = default
6625            obj_offset = None
6626        
6627        return obj, obj_offset
    def get_in_line(self) -> bool:
        """Attempt a single non-blocking acquisition of the inter-process turn.

        Returns True when this side now holds the turn, False when the peer
        process is (or won the race to be) in charge.

        NOTE(review): the flag layout looks like a Peterson-style two-party
        handshake — offsets +56/+64 appear to be the creator's/consumer's
        "holds turn" words and +72/+80 the corresponding "yielding" words;
        confirm against the sys-values layout defined elsewhere in this file.
        """
        if self._create:
            # Creator side: clear our turn word, raise our yield word,
            # then check whether the consumer is currently in charge.
            write_uint64(self.base_address, self.sys_values_offset + 56, 0)
            write_uint64(self.base_address, self.sys_values_offset + 72, 1)
            full_memory_barrier()
            if self.consumer_in_charge():
                # Peer holds the turn — back off without acquiring.
                return False
            else:
                # Claim the turn, then withdraw the yield word.
                write_uint64(self.base_address, self.sys_values_offset + 56, 1)
                full_memory_barrier()
                write_uint64(self.base_address, self.sys_values_offset + 72, 0)
                full_memory_barrier()
                self.update_free_memory_search_start()
                # Re-check after claiming: if the consumer won the race,
                # roll our claim back and yield again.
                if self.consumer_in_charge():
                    write_uint64(self.base_address, self.sys_values_offset + 56, 0)
                    full_memory_barrier()
                    write_uint64(self.base_address, self.sys_values_offset + 72, 1)
                    full_memory_barrier()
                    return False

                return True
        else:
            # Consumer side: exact mirror of the branch above using the
            # consumer's flag offsets (+64 turn, +80 yield).
            write_uint64(self.base_address, self.sys_values_offset + 64, 0)
            write_uint64(self.base_address, self.sys_values_offset + 80, 1)
            full_memory_barrier()
            if self.creator_in_charge():
                return False
            else:
                write_uint64(self.base_address, self.sys_values_offset + 64, 1)
                full_memory_barrier()
                write_uint64(self.base_address, self.sys_values_offset + 80, 0)
                full_memory_barrier()
                self.update_free_memory_search_start()
                if self.creator_in_charge():
                    write_uint64(self.base_address, self.sys_values_offset + 64, 0)
                    full_memory_barrier()
                    write_uint64(self.base_address, self.sys_values_offset + 80, 1)
                    full_memory_barrier()
                    return False
                
                return True
def release(self):
6673    def release(self):
6674        self.commit_free_memory_search_start()
6675        if self._create:
6676            write_uint64(self.base_address, self.sys_values_offset + 56, 0)
6677            write_uint64(self.base_address, self.sys_values_offset + 72, 0)
6678            full_memory_barrier()
6679        else:
6680            write_uint64(self.base_address, self.sys_values_offset + 64, 0)
6681            write_uint64(self.base_address, self.sys_values_offset + 80, 0)
6682            full_memory_barrier()
def wait_my_turn( self, time_limit: typing.Union[int, float, NoneType] = None, periodic_sleep_time: typing.Union[int, float, NoneType] = 1e-09) -> bool:
6684    def wait_my_turn(self, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001) -> bool:
6685        start_time = cpu_clock()
6686        while not self.get_in_line():
6687            if time_limit is not None:
6688                if (cpu_clock() - start_time) > time_limit:
6689                    return False
6690            
6691            if periodic_sleep_time is None:
6692                mm_pause()
6693            else:
6694                hps_sleep(periodic_sleep_time)
6695        
6696        return True
async def await_my_turn(self, time_limit: typing.Union[int, float, NoneType] = None) -> bool:
6698    async def await_my_turn(self, time_limit: Optional[RationalNumber] = None) -> bool:
6699        start_time = cpu_clock()
6700        while not self.get_in_line():
6701            if time_limit is not None:
6702                if (cpu_clock() - start_time) > time_limit:
6703                    return False
6704            
6705            await self._asleep_func()
6706        
6707        return True
def wait_for_messages( self, time_limit: typing.Union[int, float, NoneType] = None, periodic_sleep_time: typing.Union[int, float, NoneType] = 1e-09) -> bool:
6711    def wait_for_messages(self, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001) -> bool:
6712        start_time = cpu_clock()
6713        has_messages = False
6714        while not has_messages:
6715            if time_limit is not None:
6716                if (cpu_clock() - start_time) > time_limit:
6717                    return False
6718            
6719            if periodic_sleep_time is None:
6720                mm_pause()
6721            else:
6722                hps_sleep(periodic_sleep_time)
6723
6724            with wait_my_turn(self):
6725                has_messages = self.has_messages()
6726        
6727        return True
async def await_for_messages(self, time_limit: typing.Union[int, float, NoneType] = None) -> bool:
6729    async def await_for_messages(self, time_limit: Optional[RationalNumber] = None) -> bool:
6730        start_time = cpu_clock()
6731        has_messages = False
6732        while not has_messages:
6733            if time_limit is not None:
6734                if (cpu_clock() - start_time) > time_limit:
6735                    return False
6736            
6737            await self._asleep_func()
6738
6739            with await_my_turn(self, time_limit):
6740                has_messages = self.has_messages()
6741        
6742        return True
class GetInLine:
    """Context manager wrapping SharedMemory.get_in_line()/release().

    NOTE(review): __enter__ ignores get_in_line()'s boolean result, so the
    managed body runs even when the turn was NOT acquired — confirm this
    best-effort behavior is intended (WaitMyTurn blocks instead).
    """
    def __init__(self, shared_memory: SharedMemory):
        self.shared_memory: SharedMemory = shared_memory
    
    def __enter__(self):
        # Single non-blocking acquisition attempt; result deliberately unused.
        self.shared_memory.get_in_line()
        return
    
    def __exit__(self, exc_type, exc_value, traceback):
        # Always release, even when the body raised.
        self.shared_memory.release()


# Lower-case alias so the class reads like a function at call sites:
# `with get_in_line(shm): ...`
get_in_line = GetInLine
class WaitMyTurn:
    """Context manager wrapping SharedMemory.wait_my_turn()/release().

    NOTE(review): __enter__ discards wait_my_turn()'s boolean result, so a
    timeout is silently ignored and the body runs without the turn — confirm
    this is intended (WaitMyTurnWhenHasMessages raises on timeout instead).
    """
    def __init__(self, shared_memory: SharedMemory, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001):
        self.shared_memory: SharedMemory = shared_memory
        self.time_limit: Optional[RationalNumber] = time_limit
        self.periodic_sleep_time: Optional[RationalNumber] = periodic_sleep_time
    
    def __enter__(self):
        # Blocks (bounded by time_limit) until the turn is acquired.
        self.shared_memory.wait_my_turn(self.time_limit, self.periodic_sleep_time)
        return
    
    def __exit__(self, exc_type, exc_value, traceback):
        # Always release, even when the body raised.
        self.shared_memory.release()


# Lower-case alias so the class reads like a function at call sites:
# `with wait_my_turn(shm): ...`
wait_my_turn = WaitMyTurn
class WaitMyTurnWhenHasMessages:
    """Context manager that holds the turn only once messages are available.

    On enter it repeatedly acquires the turn, keeps it if has_messages() is
    true, and otherwise releases and retries. Raises OperationTimedOutError
    when a single wait_my_turn() call exceeds `time_limit`.

    NOTE(review): `time_limit` bounds each individual acquisition, not the
    overall loop — total blocking time is unbounded while the queue stays
    empty; confirm this is intended.
    """
    def __init__(self, shared_memory: SharedMemory, time_limit: Optional[RationalNumber] = None, periodic_sleep_time: Optional[RationalNumber] = 0.000000001):
        self.shared_memory: SharedMemory = shared_memory
        self.time_limit: Optional[RationalNumber] = time_limit
        self.periodic_sleep_time: Optional[RationalNumber] = periodic_sleep_time
    
    def __enter__(self):
        while True:
            if not self.shared_memory.wait_my_turn(self.time_limit, self.periodic_sleep_time):
                raise OperationTimedOutError
            
            if self.shared_memory.has_messages():
                # Keep the turn: the body runs with messages guaranteed present.
                return
            else:
                # No messages yet — give the peer a chance and try again.
                self.shared_memory.release()
    
    def __exit__(self, exc_type, exc_value, traceback):
        self.shared_memory.release()


# Lower-case alias so the class reads like a function at call sites.
wait_my_turn_when_has_messages = WaitMyTurnWhenHasMessages
class await_my_turn:
    """Async context manager wrapping SharedMemory.await_my_turn()/release().

    The lower-case class name is deliberate: it reads like a function at call
    sites — `async with await_my_turn(shm, limit): ...`.

    NOTE(review): __aenter__ discards await_my_turn()'s boolean result, so a
    timeout is silently ignored and the body runs without the turn — confirm
    this is intended.
    """
    def __init__(self, shared_memory: SharedMemory, time_limit: Optional[RationalNumber] = None):
        self.shared_memory: SharedMemory = shared_memory
        self.time_limit: Optional[RationalNumber] = time_limit
    
    async def __aenter__(self):
        # Awaits the turn (bounded by time_limit); result deliberately unused.
        await self.shared_memory.await_my_turn(self.time_limit)
    
    async def __aexit__(self, exc_type, exc_val, exc_tb):
        # Always release, even when the body raised.
        self.shared_memory.release()
def numpy_array_memory_size(np_shape, np_dtype):
    """Return the number of bytes a dense numpy array of the given shape and
    dtype occupies (element count times per-element size)."""
    return np.prod(np_shape) * np.dtype(np_dtype).itemsize
def numpy_array_made_from_pointer_memory_size(np_shape, ctypes_type) -> int:
    """Return the number of bytes needed for `np_shape` elements of the given
    ctypes element type (element count times ctypes.sizeof)."""
    return np.prod(np_shape) * ctypes.sizeof(ctypes_type)
def make_numpy_array_from_obj_offset(shared_memory: SharedMemory, offset: Offset, np_shape, np_dtype_or_ctypes_type = None) -> Any:
    """Create a numpy array that views (without copying) the buffer of an
    object stored in shared memory.

    Args:
        shared_memory: the SharedMemory instance holding the object.
        offset: offset of the object whose buffer backs the array.
        np_shape: desired array shape.
        np_dtype_or_ctypes_type: either a numpy dtype or a ctypes scalar type;
            defaults to ctypes.c_uint8.

    Raises:
        ObjBufferIsSmallerThanRequestedNumpyArrayError: if the object's buffer
            is smaller than the requested array (ctypes path only).
    """
    if np_dtype_or_ctypes_type is None:
        np_dtype_or_ctypes_type = ctypes.c_uint8
    
    data_offset, data_size = shared_memory.get_obj_buffer_2(offset)
    # BUGFIX: ctypes element types (e.g. ctypes.c_uint8) are classes, so the
    # original `isinstance(np_dtype_or_ctypes_type, _SimpleCData)` was always
    # False and this branch — including its buffer-size validation — never
    # ran; `issubclass` is the correct test for a type object.
    if isinstance(np_dtype_or_ctypes_type, type) and issubclass(np_dtype_or_ctypes_type, _SimpleCData):
        num_elements = np.prod(np_shape)
        np_array_size = num_elements * ctypes.sizeof(np_dtype_or_ctypes_type)
        if data_size < np_array_size:
            raise ObjBufferIsSmallerThanRequestedNumpyArrayError(data_size, np_array_size)
        
        data_address = shared_memory.base_address + data_offset
        void_ptr = ctypes.c_void_p(data_address)
        actual_ptr = ctypes.cast(void_ptr, ctypes.POINTER(np_dtype_or_ctypes_type))
        return np.ctypeslib.as_array(actual_ptr, shape=np_shape)
    else:
        # numpy dtype path: build the array directly over a memory view.
        return np.ndarray(np_shape, dtype=np_dtype_or_ctypes_type, buffer=shared_memory.mem_view(data_offset, data_size))
def zero_bytes_from_numpy_array(np: np.ndarray) -> bytes:
    """Return a zero-filled bytes object with the same byte length as the
    given array.

    NOTE(review): the parameter name `np` shadows the numpy module inside this
    function; renaming it would change the keyword interface, so it stays.
    """
    return b'\x00' * np.nbytes
def bytes_from_numpy_array(np: np.ndarray) -> bytes:
    """Serialize the array's data to a bytes object in C (row-major) order.

    NOTE(review): the parameter name `np` shadows the numpy module inside this
    function; renaming it would change the keyword interface, so it stays.
    """
    return np.tobytes(order='C')
def dict_to_list(mapping: AbsMapping) -> List:
    """Convert an int-keyed mapping to a list, placing each value at index
    `key`; indices absent from the mapping are left as None.

    An empty mapping yields an empty list.

    BUGFIX: the original sized the list as `max(keys)`, which is one short —
    `result[max_key] = value` always raised IndexError for a non-empty
    mapping — and `max()` raised ValueError on an empty mapping. The size
    must be `max(keys) + 1`, with an explicit empty-mapping guard.
    """
    if not mapping:
        return []
    
    result = [None] * (max(mapping.keys()) + 1)
    for key, value in mapping.items():
        result[key] = value
    
    return result
def list_to_dict(data_list: List) -> Dict:
    """Convert a list into an {index: value} dict (inverse of dict_to_list)."""
    return dict(enumerate(data_list))
def intenum_dict_to_list(mapping: AbsMapping, int_enum_class: Optional[Type] = None) -> List:
    """Convert a mapping keyed by IntEnum members (or plain ints) to a list
    with each value at index int(key); unset slots stay None.

    List length is len(int_enum_class) when given, else len() of the first
    IntEnum key type found, else max(int(key)) + 1. Assumes enum member
    values are 0-based and contiguous — TODO confirm with callers.

    BUGFIX: the fallback sizing used `max(keys, key=int)` — which returns the
    key object, not a count, and is one short (IndexError when writing the
    max key) — and crashed with ValueError on an empty mapping. The IntEnum
    scan also iterated every key; it now stops at the first match.
    """
    if int_enum_class:
        items_num = len(int_enum_class)
    else:
        items_num = None
        for key in mapping.keys():
            key_type = type(key)
            if issubclass(key_type, IntEnum):
                items_num = len(key_type)
                break
        
        if items_num is None:
            items_num = (max(int(key) for key in mapping.keys()) + 1) if mapping else 0
    
    result = [None] * items_num
    for key, value in mapping.items():
        result[int(key)] = value
    
    return result
def intenum_list_to_dict(data_list: List, int_enum_class: Optional[Type] = None) -> Dict:
    """Convert a list into an {index: value} dict; indices are converted to
    members of `int_enum_class` when one is provided (inverse of
    intenum_dict_to_list)."""
    if int_enum_class:
        return {int_enum_class(index): item for index, item in enumerate(data_list)}
    
    return dict(enumerate(data_list))
    class Tensor:
        """Stub standing in for torch.Tensor when torch is unavailable.

        NOTE(review): this appears to be part of a fallback shim (the real
        enclosing scope is outside this chunk) — confirm against the
        surrounding try/except import block.
        """
        def numpy(self) -> np.ndarray:
            # Placeholder only; the real torch.Tensor provides this conversion.
            raise NotImplementedError
    def from_numpy(numpy_ndarray: np.ndarray) -> Tensor:
        """Stub standing in for torch.from_numpy; always raises
        NotImplementedError when the real torch package is unavailable."""
        raise NotImplementedError